Importing Libraries¶

In [61]:
import os
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import librosa
import librosa.display
import IPython.display as ipd
from keras.callbacks import Callback
import seaborn as sns
import soundfile as sf
from scipy.io import wavfile as wav
from tqdm import tqdm
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from keras.utils import to_categorical
%matplotlib inline
In [2]:
meta_data = pd.read_csv("UrbanSound8K.csv")  

np.shape(meta_data) # (8732, 8)
meta_data.head()
Out[2]:
slice_file_name fsID start end salience fold classID class
0 100032-3-0-0.wav 100032 0.0 0.317551 1 5 3 dog_bark
1 100263-2-0-117.wav 100263 58.5 62.500000 1 5 2 children_playing
2 100263-2-0-121.wav 100263 60.5 64.500000 1 5 2 children_playing
3 100263-2-0-126.wav 100263 63.0 67.000000 1 5 2 children_playing
4 100263-2-0-137.wav 100263 68.5 72.500000 1 5 2 children_playing
In [3]:
sns.set(style="darkgrid")
sns.countplot(y= meta_data['class'],orient='v')
plt.show()
In [5]:
fig = plt.figure(figsize=(15,15))
fig.subplots_adjust(hspace = 0.5, wspace = 0.5)
classes = list(meta_data['class'].unique()) 
plt.title('Waveplots for different classes of sounds\n',size=20)
plt.axis('off')
for i in range(10):
    fig.add_subplot(5,2,i+1)
    plt.title(classes[i])
    plt.ylabel('Amplitude')
    file_name = 'sample_sound/'+classes[i]+".wav"       
    data,sample_rate = librosa.load(file_name)
    librosa.display.waveshow(data,sr=sample_rate)
    plt.tight_layout()
In [7]:
# Listening module for a sound class
i = 7
filename = 'sample_sound/'+classes[i]+".wav"
plt.figure(figsize=(10,3))
plt.title(classes[i])
data,sample_rate = librosa.load(filename)
librosa.display.waveshow(data,sr=sample_rate)
ipd.Audio(filename)
Out[7]:
Your browser does not support the audio element.
In [9]:
# Waveplot of original sound without getting normalized
from scipy.io import wavfile as wav
wave_sample_rate,wave_audio =wav.read(filename)
plt.figure(figsize=(12,5))
plt.plot(wave_audio)
Out[9]:
[<matplotlib.lines.Line2D at 0x25315386510>,
 <matplotlib.lines.Line2D at 0x253153a3450>]

1.2 Extracting the metadata of the audio¶

In [10]:
audio_d_list = []

for index, row in meta_data.iterrows():
    
    file_name = os.path.join(os.path.abspath('Audio_dataset/'),
                             'fold'+ str(row["fold"])+'/',
                             str(row["slice_file_name"]))
    
    # extracting channels, sampling rate, bit rate and duration
    data = sf.SoundFile(file_name)
    duration = float(meta_data.iloc[index,3])-float(meta_data.iloc[index,2])
    audio_d_list.append([data.channels,
                         data.samplerate,
                         data.subtype[4:],
                         duration])

audio_property = pd.DataFrame(audio_d_list, 
                          columns=['num_channels','sample_rate','bit_depth','duration'])
In [11]:
audio_property.head()
Out[11]:
num_channels sample_rate bit_depth duration
0 2 44100 16 0.317551
1 2 44100 16 4.000000
2 2 44100 16 4.000000
3 2 44100 16 4.000000
4 2 44100 16 4.000000

2. Inital Data Analysis¶

Spectogram for different classes¶

In [13]:
fig = plt.figure(figsize=(20, 8))
fig.subplots_adjust(hspace = 0.5, wspace = 0.5)
classes = list(meta_data['class'].unique()) 
plt.title('Spectogram for different classes of sounds\n',size=30)
plt.axis('off')
for i in range(10):
    fig.add_subplot(2,5,i+1)
    plt.title(classes[i])
    file_name = 'sample_sound/'+classes[i]+".wav"
    data,sample_rate = librosa.load(file_name)
    D = librosa.amplitude_to_db(np.abs(librosa.stft(data)), ref=np.max)
    librosa.display.specshow(D, x_axis = "time",y_axis='log')
    plt.colorbar(format='%+2.0f dB')
    plt.tight_layout()

2.1 Duration of Audio¶

In [14]:
print(audio_property.duration)
0       0.317551
1       4.000000
2       4.000000
3       4.000000
4       4.000000
          ...   
8727    4.000000
8728    2.142545
8729    3.505983
8730    2.532098
8731    2.532099
Name: duration, Length: 8732, dtype: float64

2.2 Number of Channels¶

Sound Channel refers to the independent audio signal which is collected or playback when the sound is recording or playback in different spatial position. Therefore, the number of channel is the amount of sound source when the sound is recording or the relevant speaker number when it is playback

In [15]:
print(audio_property.num_channels.value_counts(normalize=True))
2    0.915369
1    0.084631
Name: num_channels, dtype: float64

2.3 Bit Depth¶

Bit depth describes the resolution of the sound data that is captured and stored in an audio file. A higher audio bit depth indicates a more detailed sound recording.

In [16]:
print(audio_property.bit_depth.value_counts(normalize=True))
16       0.659414
24       0.315277
T        0.019354
U8       0.004924
DPCM     0.000916
ADPCM    0.000115
Name: bit_depth, dtype: float64

2.4 Sampling Frequency or Sample Rate¶

In audio production, a sample rate (or "sampling frequency") defines how many times per second a sound is sampled. Technically speaking, it is the frequency of samples used in a digital recording.

In [17]:
print(audio_property.sample_rate.value_counts(normalize=True))
44100     0.614979
48000     0.286532
96000     0.069858
24000     0.009391
16000     0.005153
22050     0.005039
11025     0.004466
192000    0.001947
8000      0.001374
11024     0.000802
32000     0.000458
Name: sample_rate, dtype: float64

3. Data Pre-Processing¶

Using Librosa’s load() function:

  • Converts the sampling rate to 22.05 KHz
  • Normalise the Bit-depth values of the audio
  • Merge the audio channels to Mono (a single channel)

Using Librosa MFCC function:

  • Extracting Mel Frequency Cepstral Coefficient
In [152]:
def feature_extractor(file_name,scale,padding,max_pad,n):
    '''
    Function to extract features from the audio file
    Does the following things using Librosa library:
        - Converts the sampling rate to 22.05 KHz
        - Normalize the Bit-depth values of the audio
        - Merge the audio channels into a Mono (single) channel
    Parameters: 
    Input:
        file_name : string; 
                    Path of the file to be processed 
        scale : False; 
                True when 1D features are required calculated 
                by taking mean along the axis
        padding : False; 
                  True when 2D features are required with padding
        max_pad : None [int]; 
                  Maxium size of a padded image/array. 
                  Required when padding is set to True
        n = 40 [int]; 
            Number of MFCCs to return          
    Output:
        mfccs = array of mfccs features.
                (1D when scaling = True
                 2D when padding = True)   
    '''
    try:        
        audio, sample_rate = librosa.load(file_name, res_type='kaiser_fast') 
        
        mfccs = librosa.feature.mfcc(y = audio, sr = sample_rate, n_mfcc = n) 
            
        if padding:
            pad_width = max_pad - mfccs.shape[1]
            mfccs = np.pad(mfccs, pad_width=((0, 0), (0, pad_width)), mode='constant')
            
        if scale: 
            mfccs = np.mean(mfccs.T,axis=0)
        
    except Exception as e:
        print("Error encountered while parsing file: ", file_name)
        return None 
     
    return mfccs

1-Dimensional Feature Extraction¶

In [19]:
feat_list = []
for index, row in meta_data.iterrows():
    
    file_name = os.path.join(os.path.abspath('Audio_dataset/'),
                             'fold'+str(row["fold"])+'/',
                             str(row["slice_file_name"]))
    class_label = row["class"]
    
    data = feature_extractor(file_name,scale=True)
    feat_list.append([data, class_label])

features_1D = pd.DataFrame(feat_list, columns=['feature','class_label'])

print('Processed ', len(features_1D), ' files')
print('Output Feature shape ',np.shape(features_1D.iloc[0,0]))
C:\Users\daksh\anaconda3\Lib\site-packages\librosa\core\spectrum.py:256: UserWarning: n_fft=2048 is too large for input signal of length=1323
  warnings.warn(
C:\Users\daksh\anaconda3\Lib\site-packages\librosa\core\spectrum.py:256: UserWarning: n_fft=2048 is too large for input signal of length=1103
  warnings.warn(
C:\Users\daksh\anaconda3\Lib\site-packages\librosa\core\spectrum.py:256: UserWarning: n_fft=2048 is too large for input signal of length=1523
  warnings.warn(
Processed  8732  files
Output Feature shape  (40,)
In [24]:
features_1D.to_pickle("features_1D.pkl")

2-Dimensional Feature Extraction with and without Padding¶

In [20]:
feat_list = []
feat_list_padded = []
for index, row in meta_data.iterrows():
    
    file_name = os.path.join(os.path.abspath('Audio_dataset/'),
                             'fold'+ str(row["fold"])+'/',
                             str(row["slice_file_name"]))
    class_label = row["class"]
    class_id = row["classID"]
    
    data = feature_extractor(file_name)
    feat_list.append([data, class_id])
    
    data = feature_extractor(file_name,padding=True,max_pad = 174)
    feat_list_padded.append([data, class_label])
    
features_wo_pad = pd.DataFrame(feat_list, columns=['feature','class_label'])
features_2D = pd.DataFrame(feat_list_padded, columns=['feature','class_label'])

print('Processed ', len(features_2D), ' files')
print('Output Feature Padded shape ',np.shape(features_2D.iloc[0,0]))
C:\Users\daksh\anaconda3\Lib\site-packages\librosa\core\spectrum.py:256: UserWarning: n_fft=2048 is too large for input signal of length=1323
  warnings.warn(
C:\Users\daksh\anaconda3\Lib\site-packages\librosa\core\spectrum.py:256: UserWarning: n_fft=2048 is too large for input signal of length=1103
  warnings.warn(
C:\Users\daksh\anaconda3\Lib\site-packages\librosa\core\spectrum.py:256: UserWarning: n_fft=2048 is too large for input signal of length=1523
  warnings.warn(
Processed  8732  files
Output Feature Padded shape  (40, 174)
In [25]:
features_wo_pad.to_pickle("features_wo_pad.pkl")
features_2D.to_pickle("features_2D.pkl")
In [26]:
features_1D = pd.read_pickle("features_1D.pkl")
features_2D = pd.read_pickle("features_2D.pkl")
features_wo_pad = pd.read_pickle("features_wo_pad.pkl")

Data Visualization after MFCC¶

In [27]:
# choose a sample of sound from each class

sound_class_dic = meta_data.groupby(['class', 'classID']).size()
sound_sample = []

for c in range(10):
    for index, row in features_wo_pad.iterrows():
        if c == row['class_label']:
            sound_sample.append(row)
            break
In [28]:
fig = plt.figure(figsize=(20, 8))
fig.subplots_adjust(hspace = 0.5, wspace = 0.5)
classes = list(meta_data['class'].unique()) 
plt.title('MFCC Spectogram for different classes of sounds\n',size=20)
plt.axis('off')
for i in range(10):
    fig.add_subplot(2,5,i+1)
    plt.title(classes[i])
    plt.ylabel('Amplitude')
    file_name = 'sample_sound/'+classes[i]+".wav"
    mfcc_ = feature_extractor(file_name)
    #mfcc_ = feature_extractor(file_name,padding=True,max_pad=174)
    librosa.display.specshow(mfcc_,x_axis='time', y_axis='log')
    plt.colorbar(format='%+2.0f dB')
    plt.tight_layout()

Label Encoding¶

In [29]:
# Convert features and corresponding classification labels into numpy arrays
X_1D = np.array(features_1D.feature.tolist())
y_1D = np.array(features_1D.class_label.tolist())

# Encode the classification labels
label_encoder = LabelEncoder()
yy_1D = to_categorical(label_encoder.fit_transform(y_1D))
In [30]:
# Convert features and corresponding classification labels into numpy arrays
X_2D = np.array(features_2D.feature.tolist())
y_2D = np.array(features_2D.class_label.tolist())

# Encode the classification labels
yy_2D = to_categorical(label_encoder.transform(y_2D)) 

Splitting the Dataset¶

In [31]:
x_train_sl_1D, x_test_sl_1D, y_train_sl_1D, y_test_sl_1D = train_test_split(X_1D,y_1D,test_size=0.2,random_state =27)

# x_train_sl_2D, x_test_sl_2D, y_train_sl_2D, y_test_sl_2D = train_test_split(
#     X_2D.reshape(X_2D.shape[0],X_2D.shape[1]*X_2D.shape[2]),yy_2D,test_size=0.2,random_state = 42)

x_train_1D, x_test_1D, y_train_1D, y_test_1D = train_test_split(X_1D, yy_1D, test_size=0.2, random_state = 27)
x_train_2D, x_test_2D, y_train_2D, y_test_2D = train_test_split(X_2D, yy_2D, test_size=0.2, random_state = 27)

Classification using Different Algorithms¶

Following will be used for classification:

  1. Random Forest
  2. Support Vector Machine
  3. Mulit-layer Perceptron (ANN)
  4. Convolutional Neural Network (CNN)

1. Random Forest Classification¶

In [32]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.datasets import make_classification
In [33]:
clf = RandomForestClassifier(max_depth=4, random_state=0,n_estimators=100)
clf.fit(x_train_sl_1D,y_train_sl_1D)
y_pred = clf.predict(x_test_sl_1D)
train_accu =  clf.score(x_train_sl_1D, y_train_sl_1D)
test_accu = clf.score(x_test_sl_1D, y_test_sl_1D)
print('Training Accuracy:', train_accu)
print('Test Accuracy:', test_accu)
Training Accuracy: 0.5712240515390121
Test Accuracy: 0.5334859759587864

!! Not so Good !!¶

2. Support Vector Machine¶

In [35]:
import pickle
from sklearn import svm
from sklearn.metrics import roc_curve
from sklearn.metrics import roc_auc_score
from sklearn.metrics import confusion_matrix
from sklearn.utils.multiclass import unique_labels
from sklearn.model_selection import GridSearchCV
import warnings
warnings.filterwarnings('ignore')

Grid Search for hyper parameter selection¶

In [36]:
#search the hyperparameter space to find the best model
tuned_parameters = [{'kernel': ['rbf'], 
                     'gamma': [1e-3, 1e-4, 1e-5],
                     'C': [1, 10 ,20,30,40,50]}]
#                    , {'kernel': ['linear'], 'C': [1, 10, 100, 1000]}]
searchpara = GridSearchCV(svm.SVC(), tuned_parameters)
searchpara.fit(x_train_sl_1D, y_train_sl_1D)
searchpara.best_params_
Out[36]:
{'C': 50, 'gamma': 0.0001, 'kernel': 'rbf'}

Model Execution¶

In [37]:
# construct the model by the best hyperparameters found above
SVM = svm.SVC(C=50.0, gamma=0.001)

# train the model
SVM.fit(x_train_sl_1D,y_train_sl_1D)

y_pred = SVM.predict(x_test_sl_1D)
In [38]:
train_accu =  SVM.score(x_train_sl_1D, y_train_sl_1D)
test_accu = SVM.score(x_test_sl_1D, y_test_sl_1D)

print('Training Accuracy: %.4f%%'% (train_accu*100))
print('Test Accuracy: %.4f%%'% (test_accu*100))
Training Accuracy: 99.8712%
Test Accuracy: 89.8111%

Confusion Matrix¶

In [39]:
def plot_confusion_matrix(y_true, y_pred, classes,
                          normalize=False,
                          title=None,
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if not title:
        if normalize:
            title = 'Normalized confusion matrix'
        else:
            title = 'Confusion matrix, without normalization'

    # Compute confusion matrix
    cm = confusion_matrix(y_true, y_pred)
    
    fig, ax = plt.subplots(figsize = (8,8))
    im = ax.imshow(cm, interpolation='nearest', cmap=cmap)
    ax.figure.colorbar(im, ax=ax)

    ax.set(xticks=np.arange(cm.shape[1]),
           yticks=np.arange(cm.shape[0]),
           xticklabels=classes, yticklabels=classes,
           title=title,
           ylabel='True label',
           xlabel='Predicted label')

    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
             rotation_mode="anchor")

    # Loop over data dimensions and create text annotations.
    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, format(cm[i, j], fmt),
                    ha="center", va="center",
                    color="white" if cm[i, j] > thresh else "black")
    fig.tight_layout()
    return ax
In [40]:
class_names = meta_data.groupby(['class', 'classID'], as_index = False).sum()['class']
plot_confusion_matrix(y_test_sl_1D, y_pred, classes=class_names, cmap=plt.cm.Blues)
plt.show()

3. Multi-Layer perceptron¶

In [42]:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, Conv2D, MaxPooling2D, GlobalAveragePooling2D
from keras.layers import Convolution2D, MaxPooling2D
from keras.optimizers import Adam
from keras.utils import to_categorical
from sklearn import metrics 
from keras.callbacks import ModelCheckpoint 
from datetime import datetime 
from sklearn.preprocessing import LabelEncoder
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split 
In [78]:
n_labels = yy_1D.shape[1]
in_shape = x_train_1D.shape[1]
n_layers = 2
n_nodes = 100

model_num = 4

# Construct model 
model = Sequential()

model.add(Dense(n_nodes, input_shape=(in_shape,)))
model.add(Activation('relu'))
model.add(Dropout(0.5))

model.add(Dense(n_nodes))
model.add(Activation('relu'))
model.add(Dropout(0.5))

model.add(Dense(n_labels))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')

Model Summary¶

In [79]:
model.summary()

# Calculate pre-training accuracy 
score = model.evaluate(x_train_1D, y_train_1D, verbose=0)
accuracy = 100*score[1] 

print("Pre-training accuracy: %.4f%%" % accuracy)
Model: "sequential_3"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense_9 (Dense)             (None, 100)               4100      
                                                                 
 activation_9 (Activation)   (None, 100)               0         
                                                                 
 dropout_6 (Dropout)         (None, 100)               0         
                                                                 
 dense_10 (Dense)            (None, 100)               10100     
                                                                 
 activation_10 (Activation)  (None, 100)               0         
                                                                 
 dropout_7 (Dropout)         (None, 100)               0         
                                                                 
 dense_11 (Dense)            (None, 10)                1010      
                                                                 
 activation_11 (Activation)  (None, 10)                0         
                                                                 
=================================================================
Total params: 15210 (59.41 KB)
Trainable params: 15210 (59.41 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
Pre-training accuracy: 12.0115%

Model Execution¶

In [80]:
n_epochs = 100
n_batch = 32

start = datetime.now()

history = model.fit(x_train_1D, 
                    y_train_1D,
                    batch_size=n_batch,
                    epochs=n_epochs,
                    validation_data=(x_test_1D, y_test_1D),
                    verbose=1)

duration = datetime.now() - start
print("Training finished in time: ", duration)
Epoch 1/100
219/219 [==============================] - 1s 2ms/step - loss: 12.5780 - accuracy: 0.1681 - val_loss: 2.0046 - val_accuracy: 0.3097
Epoch 2/100
219/219 [==============================] - 0s 2ms/step - loss: 2.7194 - accuracy: 0.2301 - val_loss: 1.9956 - val_accuracy: 0.3314
Epoch 3/100
219/219 [==============================] - 0s 2ms/step - loss: 2.1957 - accuracy: 0.2674 - val_loss: 1.8807 - val_accuracy: 0.3749
Epoch 4/100
219/219 [==============================] - 0s 2ms/step - loss: 2.0279 - accuracy: 0.3111 - val_loss: 1.7475 - val_accuracy: 0.4493
Epoch 5/100
219/219 [==============================] - 0s 2ms/step - loss: 1.8812 - accuracy: 0.3569 - val_loss: 1.6417 - val_accuracy: 0.4814
Epoch 6/100
219/219 [==============================] - 0s 2ms/step - loss: 1.7719 - accuracy: 0.3956 - val_loss: 1.5838 - val_accuracy: 0.4991
Epoch 7/100
219/219 [==============================] - 0s 2ms/step - loss: 1.7203 - accuracy: 0.4202 - val_loss: 1.4969 - val_accuracy: 0.5255
Epoch 8/100
219/219 [==============================] - 0s 2ms/step - loss: 1.6524 - accuracy: 0.4445 - val_loss: 1.4528 - val_accuracy: 0.5575
Epoch 9/100
219/219 [==============================] - 0s 2ms/step - loss: 1.6034 - accuracy: 0.4563 - val_loss: 1.3910 - val_accuracy: 0.5793
Epoch 10/100
219/219 [==============================] - 0s 2ms/step - loss: 1.5557 - accuracy: 0.4700 - val_loss: 1.3422 - val_accuracy: 0.5884
Epoch 11/100
219/219 [==============================] - 0s 2ms/step - loss: 1.4870 - accuracy: 0.4958 - val_loss: 1.3057 - val_accuracy: 0.5924
Epoch 12/100
219/219 [==============================] - 0s 2ms/step - loss: 1.4610 - accuracy: 0.5015 - val_loss: 1.2693 - val_accuracy: 0.5850
Epoch 13/100
219/219 [==============================] - 0s 2ms/step - loss: 1.4534 - accuracy: 0.5094 - val_loss: 1.2626 - val_accuracy: 0.5976
Epoch 14/100
219/219 [==============================] - 0s 2ms/step - loss: 1.4118 - accuracy: 0.5180 - val_loss: 1.2119 - val_accuracy: 0.6234
Epoch 15/100
219/219 [==============================] - 0s 2ms/step - loss: 1.3842 - accuracy: 0.5327 - val_loss: 1.2044 - val_accuracy: 0.6234
Epoch 16/100
219/219 [==============================] - 0s 2ms/step - loss: 1.3463 - accuracy: 0.5370 - val_loss: 1.1748 - val_accuracy: 0.6371
Epoch 17/100
219/219 [==============================] - 0s 2ms/step - loss: 1.3389 - accuracy: 0.5403 - val_loss: 1.1498 - val_accuracy: 0.6394
Epoch 18/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2876 - accuracy: 0.5562 - val_loss: 1.1162 - val_accuracy: 0.6560
Epoch 19/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2946 - accuracy: 0.5581 - val_loss: 1.1172 - val_accuracy: 0.6560
Epoch 20/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2570 - accuracy: 0.5744 - val_loss: 1.0794 - val_accuracy: 0.6600
Epoch 21/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2606 - accuracy: 0.5778 - val_loss: 1.0660 - val_accuracy: 0.6726
Epoch 22/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2390 - accuracy: 0.5810 - val_loss: 1.0439 - val_accuracy: 0.6789
Epoch 23/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2198 - accuracy: 0.5810 - val_loss: 1.0328 - val_accuracy: 0.6691
Epoch 24/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2240 - accuracy: 0.5841 - val_loss: 1.0464 - val_accuracy: 0.6651
Epoch 25/100
219/219 [==============================] - 0s 2ms/step - loss: 1.2069 - accuracy: 0.5956 - val_loss: 1.0067 - val_accuracy: 0.6961
Epoch 26/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1954 - accuracy: 0.5891 - val_loss: 1.0037 - val_accuracy: 0.6863
Epoch 27/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1897 - accuracy: 0.6056 - val_loss: 0.9769 - val_accuracy: 0.6966
Epoch 28/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1827 - accuracy: 0.5980 - val_loss: 0.9875 - val_accuracy: 0.6875
Epoch 29/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1441 - accuracy: 0.6057 - val_loss: 0.9743 - val_accuracy: 0.6966
Epoch 30/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1577 - accuracy: 0.6046 - val_loss: 0.9525 - val_accuracy: 0.7109
Epoch 31/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1360 - accuracy: 0.6179 - val_loss: 0.9546 - val_accuracy: 0.6961
Epoch 32/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1392 - accuracy: 0.6119 - val_loss: 0.9558 - val_accuracy: 0.7046
Epoch 33/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1341 - accuracy: 0.6107 - val_loss: 0.9561 - val_accuracy: 0.6829
Epoch 34/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1087 - accuracy: 0.6192 - val_loss: 0.9315 - val_accuracy: 0.7041
Epoch 35/100
219/219 [==============================] - 0s 2ms/step - loss: 1.1519 - accuracy: 0.6082 - val_loss: 0.9393 - val_accuracy: 0.7109
Epoch 36/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0963 - accuracy: 0.6256 - val_loss: 0.9250 - val_accuracy: 0.7132
Epoch 37/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0897 - accuracy: 0.6291 - val_loss: 0.9183 - val_accuracy: 0.7132
Epoch 38/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0940 - accuracy: 0.6203 - val_loss: 0.8933 - val_accuracy: 0.7149
Epoch 39/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0795 - accuracy: 0.6308 - val_loss: 0.9014 - val_accuracy: 0.7115
Epoch 40/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0821 - accuracy: 0.6299 - val_loss: 0.9146 - val_accuracy: 0.7109
Epoch 41/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0714 - accuracy: 0.6329 - val_loss: 0.8945 - val_accuracy: 0.7041
Epoch 42/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0629 - accuracy: 0.6309 - val_loss: 0.8828 - val_accuracy: 0.7167
Epoch 43/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0728 - accuracy: 0.6414 - val_loss: 0.8669 - val_accuracy: 0.7252
Epoch 44/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0434 - accuracy: 0.6417 - val_loss: 0.8562 - val_accuracy: 0.7218
Epoch 45/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0409 - accuracy: 0.6421 - val_loss: 0.8629 - val_accuracy: 0.7247
Epoch 46/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0516 - accuracy: 0.6398 - val_loss: 0.8489 - val_accuracy: 0.7275
Epoch 47/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0489 - accuracy: 0.6451 - val_loss: 0.8673 - val_accuracy: 0.7167
Epoch 48/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0473 - accuracy: 0.6362 - val_loss: 0.8406 - val_accuracy: 0.7373
Epoch 49/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0286 - accuracy: 0.6504 - val_loss: 0.8383 - val_accuracy: 0.7270
Epoch 50/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0204 - accuracy: 0.6515 - val_loss: 0.8405 - val_accuracy: 0.7270
Epoch 51/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0427 - accuracy: 0.6527 - val_loss: 0.8292 - val_accuracy: 0.7373
Epoch 52/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0097 - accuracy: 0.6523 - val_loss: 0.8220 - val_accuracy: 0.7338
Epoch 53/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0374 - accuracy: 0.6498 - val_loss: 0.8176 - val_accuracy: 0.7481
Epoch 54/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9992 - accuracy: 0.6611 - val_loss: 0.8161 - val_accuracy: 0.7470
Epoch 55/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9982 - accuracy: 0.6647 - val_loss: 0.8388 - val_accuracy: 0.7344
Epoch 56/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0177 - accuracy: 0.6535 - val_loss: 0.8282 - val_accuracy: 0.7338
Epoch 57/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0043 - accuracy: 0.6553 - val_loss: 0.8154 - val_accuracy: 0.7396
Epoch 58/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0077 - accuracy: 0.6588 - val_loss: 0.8253 - val_accuracy: 0.7464
Epoch 59/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0020 - accuracy: 0.6588 - val_loss: 0.8136 - val_accuracy: 0.7338
Epoch 60/100
219/219 [==============================] - 0s 2ms/step - loss: 1.0039 - accuracy: 0.6511 - val_loss: 0.8128 - val_accuracy: 0.7384
Epoch 61/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9857 - accuracy: 0.6606 - val_loss: 0.7929 - val_accuracy: 0.7384
Epoch 62/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9760 - accuracy: 0.6686 - val_loss: 0.7910 - val_accuracy: 0.7418
Epoch 63/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9788 - accuracy: 0.6620 - val_loss: 0.7956 - val_accuracy: 0.7533
Epoch 64/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9810 - accuracy: 0.6690 - val_loss: 0.7928 - val_accuracy: 0.7510
Epoch 65/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9629 - accuracy: 0.6674 - val_loss: 0.7810 - val_accuracy: 0.7476
Epoch 66/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9831 - accuracy: 0.6641 - val_loss: 0.8078 - val_accuracy: 0.7333
Epoch 67/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9681 - accuracy: 0.6696 - val_loss: 0.8109 - val_accuracy: 0.7275
Epoch 68/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9710 - accuracy: 0.6653 - val_loss: 0.8009 - val_accuracy: 0.7396
Epoch 69/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9502 - accuracy: 0.6759 - val_loss: 0.7908 - val_accuracy: 0.7413
Epoch 70/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9738 - accuracy: 0.6670 - val_loss: 0.7984 - val_accuracy: 0.7384
Epoch 71/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9809 - accuracy: 0.6624 - val_loss: 0.7876 - val_accuracy: 0.7481
Epoch 72/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9542 - accuracy: 0.6700 - val_loss: 0.7687 - val_accuracy: 0.7436
Epoch 73/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9455 - accuracy: 0.6732 - val_loss: 0.7727 - val_accuracy: 0.7499
Epoch 74/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9734 - accuracy: 0.6649 - val_loss: 0.7809 - val_accuracy: 0.7401
Epoch 75/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9468 - accuracy: 0.6812 - val_loss: 0.7567 - val_accuracy: 0.7613
Epoch 76/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9379 - accuracy: 0.6754 - val_loss: 0.7688 - val_accuracy: 0.7493
Epoch 77/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9397 - accuracy: 0.6816 - val_loss: 0.7466 - val_accuracy: 0.7556
Epoch 78/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9383 - accuracy: 0.6730 - val_loss: 0.7620 - val_accuracy: 0.7533
Epoch 79/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9475 - accuracy: 0.6809 - val_loss: 0.7570 - val_accuracy: 0.7579
Epoch 80/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9647 - accuracy: 0.6639 - val_loss: 0.7554 - val_accuracy: 0.7510
Epoch 81/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9300 - accuracy: 0.6786 - val_loss: 0.7537 - val_accuracy: 0.7510
Epoch 82/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9370 - accuracy: 0.6772 - val_loss: 0.7638 - val_accuracy: 0.7401
Epoch 83/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9314 - accuracy: 0.6817 - val_loss: 0.7516 - val_accuracy: 0.7562
Epoch 84/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9562 - accuracy: 0.6752 - val_loss: 0.7637 - val_accuracy: 0.7539
Epoch 85/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9247 - accuracy: 0.6766 - val_loss: 0.7523 - val_accuracy: 0.7521
Epoch 86/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9329 - accuracy: 0.6826 - val_loss: 0.7452 - val_accuracy: 0.7521
Epoch 87/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9192 - accuracy: 0.6793 - val_loss: 0.7452 - val_accuracy: 0.7436
Epoch 88/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9215 - accuracy: 0.6839 - val_loss: 0.7443 - val_accuracy: 0.7516
Epoch 89/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9046 - accuracy: 0.6833 - val_loss: 0.7283 - val_accuracy: 0.7590
Epoch 90/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9212 - accuracy: 0.6826 - val_loss: 0.7361 - val_accuracy: 0.7619
Epoch 91/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9342 - accuracy: 0.6772 - val_loss: 0.7479 - val_accuracy: 0.7642
Epoch 92/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9222 - accuracy: 0.6832 - val_loss: 0.7349 - val_accuracy: 0.7693
Epoch 93/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9153 - accuracy: 0.6799 - val_loss: 0.7391 - val_accuracy: 0.7527
Epoch 94/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9116 - accuracy: 0.6827 - val_loss: 0.7260 - val_accuracy: 0.7619
Epoch 95/100
219/219 [==============================] - 0s 2ms/step - loss: 0.8983 - accuracy: 0.6929 - val_loss: 0.7193 - val_accuracy: 0.7642
Epoch 96/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9175 - accuracy: 0.6812 - val_loss: 0.7529 - val_accuracy: 0.7459
Epoch 97/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9322 - accuracy: 0.6850 - val_loss: 0.7519 - val_accuracy: 0.7613
Epoch 98/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9071 - accuracy: 0.6869 - val_loss: 0.7319 - val_accuracy: 0.7624
Epoch 99/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9105 - accuracy: 0.6915 - val_loss: 0.7286 - val_accuracy: 0.7647
Epoch 100/100
219/219 [==============================] - 0s 2ms/step - loss: 0.9149 - accuracy: 0.6889 - val_loss: 0.7168 - val_accuracy: 0.7613
Training finished in time:  0:00:40.335810

Model Evalutaion¶

In [81]:
# Evaluating the model on the training and testing set
acc_train = model.evaluate(x_train_1D, y_train_1D, verbose=0)
print("Training Accuracy: ", acc_train[1])

acc_test = model.evaluate(x_test_1D, y_test_1D, verbose=0)
print("Testing Accuracy: ", acc_test[1])
Training Accuracy:  0.8210451006889343
Testing Accuracy:  0.7613050937652588
In [96]:
def plot_accuracy(history,n_epochs,n_batch):
    training_accuracy = history.history['accuracy']
    testing_accuracy = history.history['val_accuracy']  # Assuming you used 'val_accuracy' for testing accuracy

# Create a list of epoch numbers
    epochs = range(1, n_epochs + 1)

# Plot both training and testing accuracy
    plt.plot(epochs, training_accuracy, 'b', label='Training Accuracy')
    plt.plot(epochs, testing_accuracy, 'r', label='Testing Accuracy')
    plt.title('Accuracy vs Epochs')
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.legend()
    plt.show()
    
In [104]:
def plot_losses(history,n_epochs):
    train_losses = history.history['loss']
    test_losses = history.history['val_loss']  # Assuming you used 'val_loss' for validation loss

# Create a list of epoch numbers
    epochs = range(1, len(train_losses) + 1)

# Plot training and validation losses
    plt.plot(epochs, train_losses, 'b', label='Training Loss')
    plt.plot(epochs, test_losses, 'r', label='Validation Loss')
    plt.title('Training and Testing Loss vs Epochs')
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.legend()
    plt.show()

Case 1: Epochs=100, No_batch=32¶

In [97]:
plot_accuracy(history,n_epochs,32)
In [ ]:
 
In [ ]:
 

Model Execution after doing some changes in Hyperparameters¶

  1. Set n_epoch= 200
  2. num_of_batch= 64
In [91]:
model_3 = Sequential()

model_3.add(Dense(n_nodes, input_shape=(in_shape,)))
model_3.add(Activation('relu'))
model_3.add(Dropout(0.5))

model_3.add(Dense(n_nodes))
model_3.add(Activation('relu'))
model_3.add(Dropout(0.5))

model_3.add(Dense(n_labels))
model_3.add(Activation('softmax'))

model_3.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
In [92]:
n_epochs_2=200
n_batch_2=16
start = datetime.now()

history = model_3.fit(x_train_1D, 
                    y_train_1D,
                    batch_size=n_batch_2,
                    epochs=n_epochs_2,
                    validation_data=(x_test_1D, y_test_1D),
                    verbose=1)

duration = datetime.now() - start
print("Training finished in time: ", duration)
Epoch 1/200
437/437 [==============================] - 1s 2ms/step - loss: 9.9619 - accuracy: 0.1482 - val_loss: 2.1924 - val_accuracy: 0.1540
Epoch 2/200
437/437 [==============================] - 1s 1ms/step - loss: 2.4366 - accuracy: 0.1585 - val_loss: 2.1705 - val_accuracy: 0.1998
Epoch 3/200
437/437 [==============================] - 1s 1ms/step - loss: 2.2086 - accuracy: 0.2113 - val_loss: 2.0403 - val_accuracy: 0.2685
Epoch 4/200
437/437 [==============================] - 1s 2ms/step - loss: 2.0906 - accuracy: 0.2517 - val_loss: 1.9333 - val_accuracy: 0.3331
Epoch 5/200
437/437 [==============================] - 1s 1ms/step - loss: 1.9878 - accuracy: 0.2775 - val_loss: 1.8224 - val_accuracy: 0.3578
Epoch 6/200
437/437 [==============================] - 1s 1ms/step - loss: 1.8943 - accuracy: 0.3152 - val_loss: 1.7304 - val_accuracy: 0.4001
Epoch 7/200
437/437 [==============================] - 1s 1ms/step - loss: 1.8242 - accuracy: 0.3396 - val_loss: 1.6243 - val_accuracy: 0.4322
Epoch 8/200
437/437 [==============================] - 1s 1ms/step - loss: 1.7620 - accuracy: 0.3601 - val_loss: 1.5546 - val_accuracy: 0.4791
Epoch 9/200
437/437 [==============================] - 1s 1ms/step - loss: 1.6750 - accuracy: 0.4059 - val_loss: 1.4558 - val_accuracy: 0.5054
Epoch 10/200
437/437 [==============================] - 1s 2ms/step - loss: 1.6208 - accuracy: 0.4319 - val_loss: 1.4183 - val_accuracy: 0.5358
Epoch 11/200
437/437 [==============================] - 1s 1ms/step - loss: 1.5447 - accuracy: 0.4583 - val_loss: 1.3594 - val_accuracy: 0.5718
Epoch 12/200
437/437 [==============================] - 1s 1ms/step - loss: 1.5139 - accuracy: 0.4747 - val_loss: 1.2894 - val_accuracy: 0.5867
Epoch 13/200
437/437 [==============================] - 1s 1ms/step - loss: 1.4425 - accuracy: 0.4995 - val_loss: 1.3046 - val_accuracy: 0.5770
Epoch 14/200
437/437 [==============================] - 1s 1ms/step - loss: 1.4209 - accuracy: 0.5064 - val_loss: 1.2083 - val_accuracy: 0.6050
Epoch 15/200
437/437 [==============================] - 1s 1ms/step - loss: 1.3855 - accuracy: 0.5284 - val_loss: 1.1875 - val_accuracy: 0.6050
Epoch 16/200
437/437 [==============================] - 1s 1ms/step - loss: 1.3570 - accuracy: 0.5261 - val_loss: 1.1722 - val_accuracy: 0.6113
Epoch 17/200
437/437 [==============================] - 1s 1ms/step - loss: 1.3275 - accuracy: 0.5435 - val_loss: 1.1406 - val_accuracy: 0.6394
Epoch 18/200
437/437 [==============================] - 1s 1ms/step - loss: 1.2885 - accuracy: 0.5493 - val_loss: 1.0723 - val_accuracy: 0.6554
Epoch 19/200
437/437 [==============================] - 1s 1ms/step - loss: 1.2837 - accuracy: 0.5573 - val_loss: 1.0897 - val_accuracy: 0.6388
Epoch 20/200
437/437 [==============================] - 1s 2ms/step - loss: 1.2587 - accuracy: 0.5724 - val_loss: 1.0526 - val_accuracy: 0.6640
Epoch 21/200
437/437 [==============================] - 1s 1ms/step - loss: 1.2478 - accuracy: 0.5724 - val_loss: 1.0464 - val_accuracy: 0.6646
Epoch 22/200
437/437 [==============================] - 1s 2ms/step - loss: 1.2317 - accuracy: 0.5780 - val_loss: 1.0351 - val_accuracy: 0.6600
Epoch 23/200
437/437 [==============================] - 1s 2ms/step - loss: 1.2059 - accuracy: 0.5863 - val_loss: 0.9953 - val_accuracy: 0.6835
Epoch 24/200
437/437 [==============================] - 1s 2ms/step - loss: 1.1812 - accuracy: 0.5908 - val_loss: 0.9994 - val_accuracy: 0.6800
Epoch 25/200
437/437 [==============================] - 1s 2ms/step - loss: 1.1484 - accuracy: 0.6063 - val_loss: 0.9770 - val_accuracy: 0.6789
Epoch 26/200
437/437 [==============================] - 1s 1ms/step - loss: 1.1527 - accuracy: 0.6053 - val_loss: 0.9488 - val_accuracy: 0.7012
Epoch 27/200
437/437 [==============================] - 1s 1ms/step - loss: 1.1473 - accuracy: 0.6060 - val_loss: 0.9336 - val_accuracy: 0.7046
Epoch 28/200
437/437 [==============================] - 1s 1ms/step - loss: 1.1467 - accuracy: 0.6126 - val_loss: 0.9282 - val_accuracy: 0.6961
Epoch 29/200
437/437 [==============================] - 1s 2ms/step - loss: 1.1452 - accuracy: 0.6175 - val_loss: 0.9261 - val_accuracy: 0.6949
Epoch 30/200
437/437 [==============================] - 1s 2ms/step - loss: 1.1235 - accuracy: 0.6122 - val_loss: 0.9123 - val_accuracy: 0.7001
Epoch 31/200
437/437 [==============================] - 1s 2ms/step - loss: 1.1045 - accuracy: 0.6202 - val_loss: 0.8977 - val_accuracy: 0.7132
Epoch 32/200
437/437 [==============================] - 1s 2ms/step - loss: 1.1008 - accuracy: 0.6245 - val_loss: 0.9018 - val_accuracy: 0.7029
Epoch 33/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0949 - accuracy: 0.6249 - val_loss: 0.8856 - val_accuracy: 0.7144
Epoch 34/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0980 - accuracy: 0.6311 - val_loss: 0.8738 - val_accuracy: 0.7149
Epoch 35/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0660 - accuracy: 0.6321 - val_loss: 0.8429 - val_accuracy: 0.7235
Epoch 36/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0930 - accuracy: 0.6328 - val_loss: 0.8514 - val_accuracy: 0.7264
Epoch 37/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0715 - accuracy: 0.6321 - val_loss: 0.8491 - val_accuracy: 0.7201
Epoch 38/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0426 - accuracy: 0.6427 - val_loss: 0.8751 - val_accuracy: 0.7195
Epoch 39/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0667 - accuracy: 0.6349 - val_loss: 0.8372 - val_accuracy: 0.7207
Epoch 40/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0654 - accuracy: 0.6366 - val_loss: 0.8339 - val_accuracy: 0.7275
Epoch 41/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0338 - accuracy: 0.6523 - val_loss: 0.8306 - val_accuracy: 0.7367
Epoch 42/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0526 - accuracy: 0.6424 - val_loss: 0.8430 - val_accuracy: 0.7264
Epoch 43/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0404 - accuracy: 0.6467 - val_loss: 0.8435 - val_accuracy: 0.7350
Epoch 44/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0521 - accuracy: 0.6407 - val_loss: 0.8198 - val_accuracy: 0.7401
Epoch 45/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0201 - accuracy: 0.6518 - val_loss: 0.8396 - val_accuracy: 0.7281
Epoch 46/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0442 - accuracy: 0.6485 - val_loss: 0.8298 - val_accuracy: 0.7350
Epoch 47/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0224 - accuracy: 0.6533 - val_loss: 0.8242 - val_accuracy: 0.7224
Epoch 48/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0336 - accuracy: 0.6624 - val_loss: 0.8165 - val_accuracy: 0.7310
Epoch 49/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0145 - accuracy: 0.6620 - val_loss: 0.8093 - val_accuracy: 0.7315
Epoch 50/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0061 - accuracy: 0.6555 - val_loss: 0.8122 - val_accuracy: 0.7333
Epoch 51/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0078 - accuracy: 0.6623 - val_loss: 0.8009 - val_accuracy: 0.7464
Epoch 52/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0033 - accuracy: 0.6571 - val_loss: 0.8028 - val_accuracy: 0.7441
Epoch 53/200
437/437 [==============================] - 1s 1ms/step - loss: 1.0028 - accuracy: 0.6591 - val_loss: 0.8304 - val_accuracy: 0.7275
Epoch 54/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0017 - accuracy: 0.6557 - val_loss: 0.7900 - val_accuracy: 0.7350
Epoch 55/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9987 - accuracy: 0.6584 - val_loss: 0.7971 - val_accuracy: 0.7424
Epoch 56/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9953 - accuracy: 0.6628 - val_loss: 0.7932 - val_accuracy: 0.7338
Epoch 57/200
437/437 [==============================] - 1s 2ms/step - loss: 1.0013 - accuracy: 0.6617 - val_loss: 0.7953 - val_accuracy: 0.7441
Epoch 58/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9672 - accuracy: 0.6716 - val_loss: 0.7649 - val_accuracy: 0.7527
Epoch 59/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9794 - accuracy: 0.6614 - val_loss: 0.7906 - val_accuracy: 0.7413
Epoch 60/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9981 - accuracy: 0.6639 - val_loss: 0.7792 - val_accuracy: 0.7436
Epoch 61/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9876 - accuracy: 0.6670 - val_loss: 0.7571 - val_accuracy: 0.7401
Epoch 62/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9770 - accuracy: 0.6727 - val_loss: 0.7845 - val_accuracy: 0.7436
Epoch 63/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9852 - accuracy: 0.6681 - val_loss: 0.7910 - val_accuracy: 0.7493
Epoch 64/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9589 - accuracy: 0.6770 - val_loss: 0.7799 - val_accuracy: 0.7544
Epoch 65/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9777 - accuracy: 0.6680 - val_loss: 0.7779 - val_accuracy: 0.7350
Epoch 66/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9812 - accuracy: 0.6673 - val_loss: 0.7628 - val_accuracy: 0.7533
Epoch 67/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9752 - accuracy: 0.6730 - val_loss: 0.7823 - val_accuracy: 0.7470
Epoch 68/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9804 - accuracy: 0.6699 - val_loss: 0.7445 - val_accuracy: 0.7619
Epoch 69/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9674 - accuracy: 0.6684 - val_loss: 0.7546 - val_accuracy: 0.7670
Epoch 70/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9723 - accuracy: 0.6727 - val_loss: 0.7679 - val_accuracy: 0.7550
Epoch 71/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9706 - accuracy: 0.6681 - val_loss: 0.7638 - val_accuracy: 0.7682
Epoch 72/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9564 - accuracy: 0.6737 - val_loss: 0.7742 - val_accuracy: 0.7499
Epoch 73/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9501 - accuracy: 0.6785 - val_loss: 0.7622 - val_accuracy: 0.7630
Epoch 74/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9583 - accuracy: 0.6773 - val_loss: 0.7530 - val_accuracy: 0.7544
Epoch 75/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9739 - accuracy: 0.6709 - val_loss: 0.7670 - val_accuracy: 0.7550
Epoch 76/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9756 - accuracy: 0.6750 - val_loss: 0.7613 - val_accuracy: 0.7516
Epoch 77/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9464 - accuracy: 0.6773 - val_loss: 0.7707 - val_accuracy: 0.7556
Epoch 78/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9421 - accuracy: 0.6769 - val_loss: 0.7329 - val_accuracy: 0.7636
Epoch 79/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9630 - accuracy: 0.6757 - val_loss: 0.7445 - val_accuracy: 0.7596
Epoch 80/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9511 - accuracy: 0.6742 - val_loss: 0.7286 - val_accuracy: 0.7773
Epoch 81/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9386 - accuracy: 0.6832 - val_loss: 0.7682 - val_accuracy: 0.7436
Epoch 82/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9212 - accuracy: 0.6838 - val_loss: 0.7404 - val_accuracy: 0.7630
Epoch 83/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9793 - accuracy: 0.6723 - val_loss: 0.7497 - val_accuracy: 0.7567
Epoch 84/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9286 - accuracy: 0.6829 - val_loss: 0.7444 - val_accuracy: 0.7539
Epoch 85/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9402 - accuracy: 0.6819 - val_loss: 0.7347 - val_accuracy: 0.7584
Epoch 86/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9340 - accuracy: 0.6879 - val_loss: 0.7473 - val_accuracy: 0.7441
Epoch 87/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9458 - accuracy: 0.6797 - val_loss: 0.7381 - val_accuracy: 0.7642
Epoch 88/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9221 - accuracy: 0.6853 - val_loss: 0.7373 - val_accuracy: 0.7607
Epoch 89/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9140 - accuracy: 0.6859 - val_loss: 0.7482 - val_accuracy: 0.7590
Epoch 90/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9291 - accuracy: 0.6873 - val_loss: 0.7345 - val_accuracy: 0.7682
Epoch 91/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9354 - accuracy: 0.6830 - val_loss: 0.7169 - val_accuracy: 0.7716
Epoch 92/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9271 - accuracy: 0.6797 - val_loss: 0.7062 - val_accuracy: 0.7722
Epoch 93/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9313 - accuracy: 0.6825 - val_loss: 0.7360 - val_accuracy: 0.7584
Epoch 94/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9332 - accuracy: 0.6826 - val_loss: 0.7314 - val_accuracy: 0.7693
Epoch 95/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9296 - accuracy: 0.6812 - val_loss: 0.7228 - val_accuracy: 0.7699
Epoch 96/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9359 - accuracy: 0.6817 - val_loss: 0.7366 - val_accuracy: 0.7636
Epoch 97/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9423 - accuracy: 0.6777 - val_loss: 0.7305 - val_accuracy: 0.7590
Epoch 98/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9150 - accuracy: 0.6901 - val_loss: 0.7372 - val_accuracy: 0.7579
Epoch 99/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9225 - accuracy: 0.6892 - val_loss: 0.7422 - val_accuracy: 0.7579
Epoch 100/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9311 - accuracy: 0.6883 - val_loss: 0.7354 - val_accuracy: 0.7556
Epoch 101/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9406 - accuracy: 0.6882 - val_loss: 0.7453 - val_accuracy: 0.7596
Epoch 102/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9218 - accuracy: 0.6853 - val_loss: 0.7224 - val_accuracy: 0.7693
Epoch 103/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9037 - accuracy: 0.6905 - val_loss: 0.7042 - val_accuracy: 0.7768
Epoch 104/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9244 - accuracy: 0.6931 - val_loss: 0.7055 - val_accuracy: 0.7705
Epoch 105/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9261 - accuracy: 0.6911 - val_loss: 0.7096 - val_accuracy: 0.7710
Epoch 106/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9340 - accuracy: 0.6852 - val_loss: 0.7301 - val_accuracy: 0.7630
Epoch 107/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9171 - accuracy: 0.6923 - val_loss: 0.7028 - val_accuracy: 0.7728
Epoch 108/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8986 - accuracy: 0.6989 - val_loss: 0.7114 - val_accuracy: 0.7693
Epoch 109/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9233 - accuracy: 0.6893 - val_loss: 0.7068 - val_accuracy: 0.7768
Epoch 110/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9063 - accuracy: 0.6976 - val_loss: 0.6842 - val_accuracy: 0.7939
Epoch 111/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9077 - accuracy: 0.6951 - val_loss: 0.7232 - val_accuracy: 0.7607
Epoch 112/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9103 - accuracy: 0.6921 - val_loss: 0.6987 - val_accuracy: 0.7624
Epoch 113/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9167 - accuracy: 0.6971 - val_loss: 0.7337 - val_accuracy: 0.7544
Epoch 114/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9193 - accuracy: 0.6966 - val_loss: 0.7140 - val_accuracy: 0.7613
Epoch 115/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9166 - accuracy: 0.6903 - val_loss: 0.7119 - val_accuracy: 0.7596
Epoch 116/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9201 - accuracy: 0.6875 - val_loss: 0.7415 - val_accuracy: 0.7573
Epoch 117/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9224 - accuracy: 0.6999 - val_loss: 0.7100 - val_accuracy: 0.7733
Epoch 118/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8951 - accuracy: 0.7034 - val_loss: 0.7055 - val_accuracy: 0.7825
Epoch 119/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8864 - accuracy: 0.7001 - val_loss: 0.7120 - val_accuracy: 0.7745
Epoch 120/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8993 - accuracy: 0.6911 - val_loss: 0.7055 - val_accuracy: 0.7779
Epoch 121/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9095 - accuracy: 0.6921 - val_loss: 0.7111 - val_accuracy: 0.7790
Epoch 122/200
437/437 [==============================] - 1s 2ms/step - loss: 0.9022 - accuracy: 0.7004 - val_loss: 0.6778 - val_accuracy: 0.7848
Epoch 123/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9098 - accuracy: 0.6938 - val_loss: 0.7226 - val_accuracy: 0.7602
Epoch 124/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9153 - accuracy: 0.6974 - val_loss: 0.7266 - val_accuracy: 0.7653
Epoch 125/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9182 - accuracy: 0.6931 - val_loss: 0.7096 - val_accuracy: 0.7762
Epoch 126/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8994 - accuracy: 0.6984 - val_loss: 0.7038 - val_accuracy: 0.7665
Epoch 127/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9033 - accuracy: 0.6929 - val_loss: 0.7100 - val_accuracy: 0.7762
Epoch 128/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9034 - accuracy: 0.6946 - val_loss: 0.7236 - val_accuracy: 0.7533
Epoch 129/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8646 - accuracy: 0.7051 - val_loss: 0.6910 - val_accuracy: 0.7636
Epoch 130/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9057 - accuracy: 0.6931 - val_loss: 0.6915 - val_accuracy: 0.7739
Epoch 131/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8847 - accuracy: 0.6974 - val_loss: 0.7022 - val_accuracy: 0.7790
Epoch 132/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9010 - accuracy: 0.6905 - val_loss: 0.7161 - val_accuracy: 0.7647
Epoch 133/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8961 - accuracy: 0.6982 - val_loss: 0.7090 - val_accuracy: 0.7642
Epoch 134/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8994 - accuracy: 0.6985 - val_loss: 0.6837 - val_accuracy: 0.7773
Epoch 135/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8830 - accuracy: 0.7057 - val_loss: 0.7301 - val_accuracy: 0.7619
Epoch 136/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9032 - accuracy: 0.7006 - val_loss: 0.7083 - val_accuracy: 0.7682
Epoch 137/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8739 - accuracy: 0.7041 - val_loss: 0.7129 - val_accuracy: 0.7533
Epoch 138/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8650 - accuracy: 0.7059 - val_loss: 0.7208 - val_accuracy: 0.7705
Epoch 139/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8976 - accuracy: 0.6925 - val_loss: 0.7124 - val_accuracy: 0.7607
Epoch 140/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8849 - accuracy: 0.7008 - val_loss: 0.6755 - val_accuracy: 0.7705
Epoch 141/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9108 - accuracy: 0.6933 - val_loss: 0.7113 - val_accuracy: 0.7705
Epoch 142/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9011 - accuracy: 0.7004 - val_loss: 0.7048 - val_accuracy: 0.7722
Epoch 143/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8940 - accuracy: 0.7015 - val_loss: 0.6901 - val_accuracy: 0.7745
Epoch 144/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8990 - accuracy: 0.6949 - val_loss: 0.6810 - val_accuracy: 0.7705
Epoch 145/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8984 - accuracy: 0.6935 - val_loss: 0.6917 - val_accuracy: 0.7750
Epoch 146/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8720 - accuracy: 0.7049 - val_loss: 0.6949 - val_accuracy: 0.7756
Epoch 147/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8869 - accuracy: 0.7011 - val_loss: 0.6902 - val_accuracy: 0.7728
Epoch 148/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8845 - accuracy: 0.7002 - val_loss: 0.6975 - val_accuracy: 0.7819
Epoch 149/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8795 - accuracy: 0.7075 - val_loss: 0.6903 - val_accuracy: 0.7796
Epoch 150/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8883 - accuracy: 0.7065 - val_loss: 0.6962 - val_accuracy: 0.7779
Epoch 151/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8949 - accuracy: 0.6955 - val_loss: 0.6894 - val_accuracy: 0.7750
Epoch 152/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8783 - accuracy: 0.7032 - val_loss: 0.6984 - val_accuracy: 0.7762
Epoch 153/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9086 - accuracy: 0.6935 - val_loss: 0.6934 - val_accuracy: 0.7728
Epoch 154/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8913 - accuracy: 0.6955 - val_loss: 0.6892 - val_accuracy: 0.7722
Epoch 155/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9185 - accuracy: 0.6974 - val_loss: 0.7108 - val_accuracy: 0.7728
Epoch 156/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8770 - accuracy: 0.7054 - val_loss: 0.7096 - val_accuracy: 0.7653
Epoch 157/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8814 - accuracy: 0.7001 - val_loss: 0.7082 - val_accuracy: 0.7647
Epoch 158/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8942 - accuracy: 0.6981 - val_loss: 0.7145 - val_accuracy: 0.7624
Epoch 159/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8826 - accuracy: 0.7068 - val_loss: 0.7130 - val_accuracy: 0.7630
Epoch 160/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8615 - accuracy: 0.7112 - val_loss: 0.6788 - val_accuracy: 0.7905
Epoch 161/200
437/437 [==============================] - 1s 1ms/step - loss: 0.9068 - accuracy: 0.6996 - val_loss: 0.7017 - val_accuracy: 0.7653
Epoch 162/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8746 - accuracy: 0.7014 - val_loss: 0.6693 - val_accuracy: 0.7899
Epoch 163/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8788 - accuracy: 0.7107 - val_loss: 0.7056 - val_accuracy: 0.7773
Epoch 164/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8868 - accuracy: 0.7037 - val_loss: 0.6873 - val_accuracy: 0.7756
Epoch 165/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8854 - accuracy: 0.7048 - val_loss: 0.6780 - val_accuracy: 0.7779
Epoch 166/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8788 - accuracy: 0.7032 - val_loss: 0.7029 - val_accuracy: 0.7699
Epoch 167/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8840 - accuracy: 0.7016 - val_loss: 0.6853 - val_accuracy: 0.7756
Epoch 168/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8667 - accuracy: 0.7078 - val_loss: 0.6947 - val_accuracy: 0.7728
Epoch 169/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8815 - accuracy: 0.7032 - val_loss: 0.6883 - val_accuracy: 0.7779
Epoch 170/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8944 - accuracy: 0.6979 - val_loss: 0.6810 - val_accuracy: 0.7790
Epoch 171/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8565 - accuracy: 0.7155 - val_loss: 0.6824 - val_accuracy: 0.7722
Epoch 172/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8663 - accuracy: 0.7068 - val_loss: 0.6857 - val_accuracy: 0.7745
Epoch 173/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8739 - accuracy: 0.7105 - val_loss: 0.7058 - val_accuracy: 0.7682
Epoch 174/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8616 - accuracy: 0.7127 - val_loss: 0.6788 - val_accuracy: 0.7745
Epoch 175/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8579 - accuracy: 0.7088 - val_loss: 0.7036 - val_accuracy: 0.7665
Epoch 176/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8759 - accuracy: 0.7051 - val_loss: 0.6807 - val_accuracy: 0.7768
Epoch 177/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8721 - accuracy: 0.7054 - val_loss: 0.6797 - val_accuracy: 0.7745
Epoch 178/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8514 - accuracy: 0.7184 - val_loss: 0.6863 - val_accuracy: 0.7699
Epoch 179/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8601 - accuracy: 0.7102 - val_loss: 0.6857 - val_accuracy: 0.7773
Epoch 180/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8809 - accuracy: 0.7022 - val_loss: 0.6794 - val_accuracy: 0.7819
Epoch 181/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8728 - accuracy: 0.7031 - val_loss: 0.7144 - val_accuracy: 0.7539
Epoch 182/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8796 - accuracy: 0.7031 - val_loss: 0.6798 - val_accuracy: 0.7819
Epoch 183/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8783 - accuracy: 0.7104 - val_loss: 0.6777 - val_accuracy: 0.7768
Epoch 184/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8567 - accuracy: 0.7028 - val_loss: 0.6543 - val_accuracy: 0.7836
Epoch 185/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8721 - accuracy: 0.7132 - val_loss: 0.6719 - val_accuracy: 0.7716
Epoch 186/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8785 - accuracy: 0.7092 - val_loss: 0.6882 - val_accuracy: 0.7653
Epoch 187/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8702 - accuracy: 0.7032 - val_loss: 0.6849 - val_accuracy: 0.7733
Epoch 188/200
437/437 [==============================] - 1s 2ms/step - loss: 0.8508 - accuracy: 0.7134 - val_loss: 0.6769 - val_accuracy: 0.7779
Epoch 189/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8765 - accuracy: 0.7044 - val_loss: 0.6891 - val_accuracy: 0.7728
Epoch 190/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8586 - accuracy: 0.7081 - val_loss: 0.6690 - val_accuracy: 0.7779
Epoch 191/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8567 - accuracy: 0.7178 - val_loss: 0.6808 - val_accuracy: 0.7739
Epoch 192/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8826 - accuracy: 0.7005 - val_loss: 0.6881 - val_accuracy: 0.7808
Epoch 193/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8554 - accuracy: 0.7112 - val_loss: 0.6963 - val_accuracy: 0.7687
Epoch 194/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8674 - accuracy: 0.7038 - val_loss: 0.6826 - val_accuracy: 0.7682
Epoch 195/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8690 - accuracy: 0.7051 - val_loss: 0.7196 - val_accuracy: 0.7590
Epoch 196/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8856 - accuracy: 0.7055 - val_loss: 0.6802 - val_accuracy: 0.7779
Epoch 197/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8835 - accuracy: 0.7044 - val_loss: 0.6797 - val_accuracy: 0.7790
Epoch 198/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8591 - accuracy: 0.7075 - val_loss: 0.6875 - val_accuracy: 0.7693
Epoch 199/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8766 - accuracy: 0.7094 - val_loss: 0.6744 - val_accuracy: 0.7739
Epoch 200/200
437/437 [==============================] - 1s 1ms/step - loss: 0.8797 - accuracy: 0.7042 - val_loss: 0.6686 - val_accuracy: 0.7785
Training finished in time:  0:02:09.654524
In [ ]:
 

Case 2: Epochs=200, No_batch=16¶

In [99]:
plot_accuracy(history,n_epochs_2,n_batch_2)
In [105]:
plot_losses(history,n_epochs_2)

4. Convolutional Neural Network¶

In [110]:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, Conv2D, MaxPooling2D, GlobalAveragePooling2D
from keras.layers import Convolution2D, MaxPooling2D
from keras.optimizers import Adam
from keras.utils import to_categorical
from sklearn import metrics 
from keras.callbacks import ModelCheckpoint 
from datetime import datetime 
from sklearn.preprocessing import LabelEncoder
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split 
from keras.models import model_from_json

Data reshaping¶

In [111]:
num_rows = 40
num_columns = 174
num_channels = 1

x_train_2D = x_train_2D.reshape(x_train_2D.shape[0], num_rows, num_columns, num_channels)
x_test_2D = x_test_2D.reshape(x_test_2D.shape[0], num_rows, num_columns, num_channels)

num_labels = yy_2D.shape[1]
filter_size = 2

Model Definition¶

In [112]:
# Model Configuration

cnn_model_num = 5

n_labels = yy_1D.shape[1]
n_conv_layers = 4
n_mlp_layers = 2

model = Sequential()
model.add(Conv2D(filters=16, kernel_size=2, input_shape=(num_rows, num_columns, num_channels), activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))

model.add(Conv2D(filters=32, kernel_size=2, activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))

model.add(Conv2D(filters=64, kernel_size=2, activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))

model.add(Conv2D(filters=128, kernel_size=2, activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))
#model.add(GlobalAveragePooling2D())

model.add(Flatten())

model.add(Dense(256))
model.add(Activation('relu'))
model.add(Dropout(0.5))

model.add(Dense(256))
model.add(Activation('relu'))
model.add(Dropout(0.5))

model.add(Dense(n_labels))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')

Summarizing Model¶

In [114]:
model.summary()

# Calculate pre-training accuracy 
score = model.evaluate(x_test_2D, y_test_2D, verbose=1)
accuracy = 100*score[1]

print("Pre-training accuracy: %.4f%%" % accuracy)
Model: "sequential_6"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d (Conv2D)             (None, 39, 173, 16)       80        
                                                                 
 max_pooling2d (MaxPooling2  (None, 19, 86, 16)        0         
 D)                                                              
                                                                 
 dropout_12 (Dropout)        (None, 19, 86, 16)        0         
                                                                 
 conv2d_1 (Conv2D)           (None, 18, 85, 32)        2080      
                                                                 
 max_pooling2d_1 (MaxPoolin  (None, 9, 42, 32)         0         
 g2D)                                                            
                                                                 
 dropout_13 (Dropout)        (None, 9, 42, 32)         0         
                                                                 
 conv2d_2 (Conv2D)           (None, 8, 41, 64)         8256      
                                                                 
 max_pooling2d_2 (MaxPoolin  (None, 4, 20, 64)         0         
 g2D)                                                            
                                                                 
 dropout_14 (Dropout)        (None, 4, 20, 64)         0         
                                                                 
 conv2d_3 (Conv2D)           (None, 3, 19, 128)        32896     
                                                                 
 max_pooling2d_3 (MaxPoolin  (None, 1, 9, 128)         0         
 g2D)                                                            
                                                                 
 dropout_15 (Dropout)        (None, 1, 9, 128)         0         
                                                                 
 flatten (Flatten)           (None, 1152)              0         
                                                                 
 dense_18 (Dense)            (None, 256)               295168    
                                                                 
 activation_18 (Activation)  (None, 256)               0         
                                                                 
 dropout_16 (Dropout)        (None, 256)               0         
                                                                 
 dense_19 (Dense)            (None, 256)               65792     
                                                                 
 activation_19 (Activation)  (None, 256)               0         
                                                                 
 dropout_17 (Dropout)        (None, 256)               0         
                                                                 
 dense_20 (Dense)            (None, 10)                2570      
                                                                 
 activation_20 (Activation)  (None, 10)                0         
                                                                 
=================================================================
Total params: 406842 (1.55 MB)
Trainable params: 406842 (1.55 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
55/55 [==============================] - 1s 20ms/step - loss: 3.5678 - accuracy: 0.1065
Pre-training accuracy: 10.6468%

Model Execution¶

In [115]:
num_epochs = 200
num_batch_size = 256

start = datetime.now()

history = model.fit(x_train_2D, 
          y_train_2D, 
          batch_size=num_batch_size, 
          epochs=num_epochs, 
          validation_data=(x_test_2D, y_test_2D),  
          verbose=1)

duration = datetime.now() - start
print("Training completed in time: ", duration)
Epoch 1/200
28/28 [==============================] - 20s 666ms/step - loss: 5.2682 - accuracy: 0.1210 - val_loss: 2.2766 - val_accuracy: 0.1923
Epoch 2/200
28/28 [==============================] - 16s 583ms/step - loss: 2.2443 - accuracy: 0.1626 - val_loss: 2.2073 - val_accuracy: 0.2210
Epoch 3/200
28/28 [==============================] - 15s 554ms/step - loss: 2.1717 - accuracy: 0.1980 - val_loss: 2.1329 - val_accuracy: 0.2553
Epoch 4/200
28/28 [==============================] - 16s 556ms/step - loss: 2.1056 - accuracy: 0.2206 - val_loss: 2.0706 - val_accuracy: 0.2822
Epoch 5/200
28/28 [==============================] - 15s 554ms/step - loss: 2.0502 - accuracy: 0.2487 - val_loss: 2.0159 - val_accuracy: 0.2833
Epoch 6/200
28/28 [==============================] - 15s 554ms/step - loss: 1.9452 - accuracy: 0.2805 - val_loss: 1.9698 - val_accuracy: 0.2753
Epoch 7/200
28/28 [==============================] - 16s 563ms/step - loss: 1.8753 - accuracy: 0.3026 - val_loss: 1.9002 - val_accuracy: 0.3171
Epoch 8/200
28/28 [==============================] - 15s 551ms/step - loss: 1.8043 - accuracy: 0.3323 - val_loss: 1.8043 - val_accuracy: 0.3578
Epoch 9/200
28/28 [==============================] - 16s 569ms/step - loss: 1.7327 - accuracy: 0.3522 - val_loss: 1.7456 - val_accuracy: 0.3892
Epoch 10/200
28/28 [==============================] - 16s 556ms/step - loss: 1.6744 - accuracy: 0.3888 - val_loss: 1.6930 - val_accuracy: 0.4184
Epoch 11/200
28/28 [==============================] - 18s 636ms/step - loss: 1.5930 - accuracy: 0.4180 - val_loss: 1.5688 - val_accuracy: 0.4900
Epoch 12/200
28/28 [==============================] - 16s 587ms/step - loss: 1.5385 - accuracy: 0.4339 - val_loss: 1.4900 - val_accuracy: 0.5117
Epoch 13/200
28/28 [==============================] - 18s 638ms/step - loss: 1.5057 - accuracy: 0.4598 - val_loss: 1.4484 - val_accuracy: 0.5026
Epoch 14/200
28/28 [==============================] - 16s 566ms/step - loss: 1.4332 - accuracy: 0.4760 - val_loss: 1.4313 - val_accuracy: 0.5072
Epoch 15/200
28/28 [==============================] - 17s 601ms/step - loss: 1.3902 - accuracy: 0.5032 - val_loss: 1.3649 - val_accuracy: 0.5386
Epoch 16/200
28/28 [==============================] - 22s 776ms/step - loss: 1.3485 - accuracy: 0.5178 - val_loss: 1.3413 - val_accuracy: 0.5495
Epoch 17/200
28/28 [==============================] - 25s 915ms/step - loss: 1.3098 - accuracy: 0.5361 - val_loss: 1.2577 - val_accuracy: 0.6027
Epoch 18/200
28/28 [==============================] - 29s 1s/step - loss: 1.2899 - accuracy: 0.5442 - val_loss: 1.2281 - val_accuracy: 0.6045
Epoch 19/200
28/28 [==============================] - 22s 777ms/step - loss: 1.2351 - accuracy: 0.5682 - val_loss: 1.2024 - val_accuracy: 0.6113
Epoch 20/200
28/28 [==============================] - 27s 954ms/step - loss: 1.2017 - accuracy: 0.5757 - val_loss: 1.1937 - val_accuracy: 0.5913
Epoch 21/200
28/28 [==============================] - 23s 832ms/step - loss: 1.1805 - accuracy: 0.5895 - val_loss: 1.1377 - val_accuracy: 0.6234
Epoch 22/200
28/28 [==============================] - 27s 975ms/step - loss: 1.1475 - accuracy: 0.6001 - val_loss: 1.1102 - val_accuracy: 0.6319
Epoch 23/200
28/28 [==============================] - 25s 891ms/step - loss: 1.1042 - accuracy: 0.6120 - val_loss: 1.0592 - val_accuracy: 0.6474
Epoch 24/200
28/28 [==============================] - 24s 861ms/step - loss: 1.0864 - accuracy: 0.6225 - val_loss: 1.0364 - val_accuracy: 0.6588
Epoch 25/200
28/28 [==============================] - 24s 840ms/step - loss: 1.0360 - accuracy: 0.6445 - val_loss: 1.0354 - val_accuracy: 0.6428
Epoch 26/200
28/28 [==============================] - 19s 664ms/step - loss: 1.0284 - accuracy: 0.6445 - val_loss: 1.0007 - val_accuracy: 0.6789
Epoch 27/200
28/28 [==============================] - 18s 632ms/step - loss: 1.0125 - accuracy: 0.6520 - val_loss: 0.9817 - val_accuracy: 0.6869
Epoch 28/200
28/28 [==============================] - 17s 609ms/step - loss: 0.9663 - accuracy: 0.6746 - val_loss: 0.9559 - val_accuracy: 0.6800
Epoch 29/200
28/28 [==============================] - 17s 613ms/step - loss: 0.9404 - accuracy: 0.6775 - val_loss: 0.9198 - val_accuracy: 0.6983
Epoch 30/200
28/28 [==============================] - 19s 667ms/step - loss: 0.9341 - accuracy: 0.6835 - val_loss: 0.9175 - val_accuracy: 0.6880
Epoch 31/200
28/28 [==============================] - 18s 661ms/step - loss: 0.9000 - accuracy: 0.6972 - val_loss: 0.8976 - val_accuracy: 0.6961
Epoch 32/200
28/28 [==============================] - 17s 608ms/step - loss: 0.8588 - accuracy: 0.7087 - val_loss: 0.8131 - val_accuracy: 0.7281
Epoch 33/200
28/28 [==============================] - 19s 675ms/step - loss: 0.8467 - accuracy: 0.7074 - val_loss: 0.8348 - val_accuracy: 0.7172
Epoch 34/200
28/28 [==============================] - 19s 668ms/step - loss: 0.7971 - accuracy: 0.7307 - val_loss: 0.8129 - val_accuracy: 0.7315
Epoch 35/200
28/28 [==============================] - 17s 611ms/step - loss: 0.8043 - accuracy: 0.7278 - val_loss: 0.7437 - val_accuracy: 0.7521
Epoch 36/200
28/28 [==============================] - 17s 618ms/step - loss: 0.7775 - accuracy: 0.7374 - val_loss: 0.7834 - val_accuracy: 0.7333
Epoch 37/200
28/28 [==============================] - 17s 610ms/step - loss: 0.7463 - accuracy: 0.7493 - val_loss: 0.7832 - val_accuracy: 0.7235
Epoch 38/200
28/28 [==============================] - 19s 688ms/step - loss: 0.7387 - accuracy: 0.7538 - val_loss: 0.7311 - val_accuracy: 0.7550
Epoch 39/200
28/28 [==============================] - 17s 615ms/step - loss: 0.7119 - accuracy: 0.7621 - val_loss: 0.6791 - val_accuracy: 0.7722
Epoch 40/200
28/28 [==============================] - 19s 690ms/step - loss: 0.6816 - accuracy: 0.7682 - val_loss: 0.6713 - val_accuracy: 0.7716
Epoch 41/200
28/28 [==============================] - 19s 664ms/step - loss: 0.6729 - accuracy: 0.7735 - val_loss: 0.6884 - val_accuracy: 0.7624
Epoch 42/200
28/28 [==============================] - 18s 652ms/step - loss: 0.6800 - accuracy: 0.7704 - val_loss: 0.6811 - val_accuracy: 0.7705
Epoch 43/200
28/28 [==============================] - 18s 637ms/step - loss: 0.6410 - accuracy: 0.7843 - val_loss: 0.6386 - val_accuracy: 0.7888
Epoch 44/200
28/28 [==============================] - 20s 731ms/step - loss: 0.6411 - accuracy: 0.7851 - val_loss: 0.6268 - val_accuracy: 0.7934
Epoch 45/200
28/28 [==============================] - 18s 656ms/step - loss: 0.6275 - accuracy: 0.7870 - val_loss: 0.6303 - val_accuracy: 0.7899
Epoch 46/200
28/28 [==============================] - 22s 799ms/step - loss: 0.6006 - accuracy: 0.8033 - val_loss: 0.5842 - val_accuracy: 0.8117
Epoch 47/200
28/28 [==============================] - 27s 947ms/step - loss: 0.5940 - accuracy: 0.8036 - val_loss: 0.5823 - val_accuracy: 0.8037
Epoch 48/200
28/28 [==============================] - 28s 1s/step - loss: 0.5896 - accuracy: 0.8037 - val_loss: 0.6105 - val_accuracy: 0.7871
Epoch 49/200
28/28 [==============================] - 27s 966ms/step - loss: 0.5938 - accuracy: 0.7993 - val_loss: 0.5975 - val_accuracy: 0.8042
Epoch 50/200
28/28 [==============================] - 29s 1s/step - loss: 0.5447 - accuracy: 0.8185 - val_loss: 0.6088 - val_accuracy: 0.7991
Epoch 51/200
28/28 [==============================] - 26s 940ms/step - loss: 0.5285 - accuracy: 0.8203 - val_loss: 0.5580 - val_accuracy: 0.8157
Epoch 52/200
28/28 [==============================] - 28s 995ms/step - loss: 0.5362 - accuracy: 0.8209 - val_loss: 0.5751 - val_accuracy: 0.8094
Epoch 53/200
28/28 [==============================] - 26s 939ms/step - loss: 0.5283 - accuracy: 0.8195 - val_loss: 0.5584 - val_accuracy: 0.8088
Epoch 54/200
28/28 [==============================] - 27s 981ms/step - loss: 0.5072 - accuracy: 0.8338 - val_loss: 0.5224 - val_accuracy: 0.8203
Epoch 55/200
28/28 [==============================] - 27s 974ms/step - loss: 0.5071 - accuracy: 0.8279 - val_loss: 0.5335 - val_accuracy: 0.8128
Epoch 56/200
28/28 [==============================] - 30s 1s/step - loss: 0.5021 - accuracy: 0.8309 - val_loss: 0.5385 - val_accuracy: 0.8157
Epoch 57/200
28/28 [==============================] - 27s 949ms/step - loss: 0.4787 - accuracy: 0.8381 - val_loss: 0.5100 - val_accuracy: 0.8226
Epoch 58/200
28/28 [==============================] - 31s 1s/step - loss: 0.4641 - accuracy: 0.8491 - val_loss: 0.5132 - val_accuracy: 0.8254
Epoch 59/200
28/28 [==============================] - 24s 848ms/step - loss: 0.4723 - accuracy: 0.8395 - val_loss: 0.5301 - val_accuracy: 0.8254
Epoch 60/200
28/28 [==============================] - 23s 835ms/step - loss: 0.4605 - accuracy: 0.8455 - val_loss: 0.5058 - val_accuracy: 0.8317
Epoch 61/200
28/28 [==============================] - 28s 1s/step - loss: 0.4571 - accuracy: 0.8462 - val_loss: 0.4722 - val_accuracy: 0.8443
Epoch 62/200
28/28 [==============================] - 24s 836ms/step - loss: 0.4267 - accuracy: 0.8553 - val_loss: 0.4744 - val_accuracy: 0.8454
Epoch 63/200
28/28 [==============================] - 23s 821ms/step - loss: 0.4366 - accuracy: 0.8545 - val_loss: 0.4502 - val_accuracy: 0.8454
Epoch 64/200
28/28 [==============================] - 25s 906ms/step - loss: 0.4339 - accuracy: 0.8527 - val_loss: 0.4484 - val_accuracy: 0.8535
Epoch 65/200
28/28 [==============================] - 21s 751ms/step - loss: 0.4047 - accuracy: 0.8603 - val_loss: 0.4542 - val_accuracy: 0.8449
Epoch 66/200
28/28 [==============================] - 20s 706ms/step - loss: 0.4231 - accuracy: 0.8588 - val_loss: 0.4940 - val_accuracy: 0.8426
Epoch 67/200
28/28 [==============================] - 21s 736ms/step - loss: 0.3845 - accuracy: 0.8700 - val_loss: 0.4596 - val_accuracy: 0.8603
Epoch 68/200
28/28 [==============================] - 26s 925ms/step - loss: 0.3859 - accuracy: 0.8693 - val_loss: 0.4406 - val_accuracy: 0.8495
Epoch 69/200
28/28 [==============================] - 20s 712ms/step - loss: 0.3840 - accuracy: 0.8710 - val_loss: 0.4347 - val_accuracy: 0.8523
Epoch 70/200
28/28 [==============================] - 17s 625ms/step - loss: 0.3750 - accuracy: 0.8742 - val_loss: 0.4471 - val_accuracy: 0.8569
Epoch 71/200
28/28 [==============================] - 26s 929ms/step - loss: 0.3624 - accuracy: 0.8816 - val_loss: 0.4504 - val_accuracy: 0.8443
Epoch 72/200
28/28 [==============================] - 24s 856ms/step - loss: 0.3818 - accuracy: 0.8730 - val_loss: 0.4672 - val_accuracy: 0.8426
Epoch 73/200
28/28 [==============================] - 27s 971ms/step - loss: 0.3587 - accuracy: 0.8805 - val_loss: 0.4309 - val_accuracy: 0.8569
Epoch 74/200
28/28 [==============================] - 27s 945ms/step - loss: 0.3512 - accuracy: 0.8842 - val_loss: 0.4342 - val_accuracy: 0.8506
Epoch 75/200
28/28 [==============================] - 41s 1s/step - loss: 0.3309 - accuracy: 0.8882 - val_loss: 0.3838 - val_accuracy: 0.8758
Epoch 76/200
28/28 [==============================] - 22s 757ms/step - loss: 0.3419 - accuracy: 0.8820 - val_loss: 0.4485 - val_accuracy: 0.8552
Epoch 77/200
28/28 [==============================] - 19s 686ms/step - loss: 0.3531 - accuracy: 0.8889 - val_loss: 0.4286 - val_accuracy: 0.8580
Epoch 78/200
28/28 [==============================] - 25s 880ms/step - loss: 0.3321 - accuracy: 0.8905 - val_loss: 0.4121 - val_accuracy: 0.8603
Epoch 79/200
28/28 [==============================] - 24s 848ms/step - loss: 0.3287 - accuracy: 0.8899 - val_loss: 0.3846 - val_accuracy: 0.8735
Epoch 80/200
28/28 [==============================] - 21s 751ms/step - loss: 0.3273 - accuracy: 0.8913 - val_loss: 0.4067 - val_accuracy: 0.8655
Epoch 81/200
28/28 [==============================] - 20s 703ms/step - loss: 0.3164 - accuracy: 0.8925 - val_loss: 0.3721 - val_accuracy: 0.8706
Epoch 82/200
28/28 [==============================] - 17s 589ms/step - loss: 0.3201 - accuracy: 0.8923 - val_loss: 0.4152 - val_accuracy: 0.8718
Epoch 83/200
28/28 [==============================] - 16s 575ms/step - loss: 0.3137 - accuracy: 0.8961 - val_loss: 0.3870 - val_accuracy: 0.8746
Epoch 84/200
28/28 [==============================] - 16s 566ms/step - loss: 0.3133 - accuracy: 0.8926 - val_loss: 0.3720 - val_accuracy: 0.8764
Epoch 85/200
28/28 [==============================] - 16s 569ms/step - loss: 0.3064 - accuracy: 0.8935 - val_loss: 0.4092 - val_accuracy: 0.8626
Epoch 86/200
28/28 [==============================] - 16s 563ms/step - loss: 0.2956 - accuracy: 0.8985 - val_loss: 0.3714 - val_accuracy: 0.8769
Epoch 87/200
28/28 [==============================] - 19s 673ms/step - loss: 0.3015 - accuracy: 0.8999 - val_loss: 0.3854 - val_accuracy: 0.8746
Epoch 88/200
28/28 [==============================] - 18s 612ms/step - loss: 0.2939 - accuracy: 0.9006 - val_loss: 0.3767 - val_accuracy: 0.8695
Epoch 89/200
28/28 [==============================] - 16s 562ms/step - loss: 0.2716 - accuracy: 0.9097 - val_loss: 0.3615 - val_accuracy: 0.8895
Epoch 90/200
28/28 [==============================] - 15s 551ms/step - loss: 0.2778 - accuracy: 0.9068 - val_loss: 0.3446 - val_accuracy: 0.8941
Epoch 91/200
28/28 [==============================] - 16s 558ms/step - loss: 0.2702 - accuracy: 0.9097 - val_loss: 0.3584 - val_accuracy: 0.8849
Epoch 92/200
28/28 [==============================] - 16s 554ms/step - loss: 0.2826 - accuracy: 0.9058 - val_loss: 0.3243 - val_accuracy: 0.8924
Epoch 93/200
28/28 [==============================] - 16s 559ms/step - loss: 0.2607 - accuracy: 0.9124 - val_loss: 0.3638 - val_accuracy: 0.8809
Epoch 94/200
28/28 [==============================] - 15s 548ms/step - loss: 0.2724 - accuracy: 0.9110 - val_loss: 0.3820 - val_accuracy: 0.8718
Epoch 95/200
28/28 [==============================] - 15s 551ms/step - loss: 0.2785 - accuracy: 0.9042 - val_loss: 0.3920 - val_accuracy: 0.8769
Epoch 96/200
28/28 [==============================] - 15s 548ms/step - loss: 0.2485 - accuracy: 0.9157 - val_loss: 0.3559 - val_accuracy: 0.8838
Epoch 97/200
28/28 [==============================] - 16s 557ms/step - loss: 0.2557 - accuracy: 0.9137 - val_loss: 0.3480 - val_accuracy: 0.8798
Epoch 98/200
28/28 [==============================] - 15s 548ms/step - loss: 0.2786 - accuracy: 0.9059 - val_loss: 0.3629 - val_accuracy: 0.8832
Epoch 99/200
28/28 [==============================] - 16s 555ms/step - loss: 0.2371 - accuracy: 0.9197 - val_loss: 0.3368 - val_accuracy: 0.8867
Epoch 100/200
28/28 [==============================] - 15s 550ms/step - loss: 0.2303 - accuracy: 0.9184 - val_loss: 0.3363 - val_accuracy: 0.8895
Epoch 101/200
28/28 [==============================] - 16s 569ms/step - loss: 0.2481 - accuracy: 0.9191 - val_loss: 0.3710 - val_accuracy: 0.8844
Epoch 102/200
28/28 [==============================] - 16s 561ms/step - loss: 0.2423 - accuracy: 0.9181 - val_loss: 0.3207 - val_accuracy: 0.9044
Epoch 103/200
28/28 [==============================] - 15s 546ms/step - loss: 0.2318 - accuracy: 0.9187 - val_loss: 0.3261 - val_accuracy: 0.9015
Epoch 104/200
28/28 [==============================] - 16s 555ms/step - loss: 0.2291 - accuracy: 0.9246 - val_loss: 0.3103 - val_accuracy: 0.8998
Epoch 105/200
28/28 [==============================] - 16s 561ms/step - loss: 0.2186 - accuracy: 0.9225 - val_loss: 0.3275 - val_accuracy: 0.8998
Epoch 106/200
28/28 [==============================] - 15s 550ms/step - loss: 0.2386 - accuracy: 0.9233 - val_loss: 0.3194 - val_accuracy: 0.8964
Epoch 107/200
28/28 [==============================] - 16s 562ms/step - loss: 0.2321 - accuracy: 0.9203 - val_loss: 0.3172 - val_accuracy: 0.8981
Epoch 108/200
28/28 [==============================] - 17s 602ms/step - loss: 0.2097 - accuracy: 0.9303 - val_loss: 0.3295 - val_accuracy: 0.8947
Epoch 109/200
28/28 [==============================] - 16s 575ms/step - loss: 0.2015 - accuracy: 0.9306 - val_loss: 0.3363 - val_accuracy: 0.8958
Epoch 110/200
28/28 [==============================] - 16s 564ms/step - loss: 0.2098 - accuracy: 0.9307 - val_loss: 0.3087 - val_accuracy: 0.9033
Epoch 111/200
28/28 [==============================] - 15s 552ms/step - loss: 0.2094 - accuracy: 0.9301 - val_loss: 0.3381 - val_accuracy: 0.8952
Epoch 112/200
28/28 [==============================] - 16s 561ms/step - loss: 0.2006 - accuracy: 0.9336 - val_loss: 0.3310 - val_accuracy: 0.8975
Epoch 113/200
28/28 [==============================] - 15s 545ms/step - loss: 0.2032 - accuracy: 0.9311 - val_loss: 0.3243 - val_accuracy: 0.9004
Epoch 114/200
28/28 [==============================] - 15s 546ms/step - loss: 0.2181 - accuracy: 0.9273 - val_loss: 0.3581 - val_accuracy: 0.8884
Epoch 115/200
28/28 [==============================] - 15s 551ms/step - loss: 0.2064 - accuracy: 0.9330 - val_loss: 0.3236 - val_accuracy: 0.8998
Epoch 116/200
28/28 [==============================] - 15s 544ms/step - loss: 0.1998 - accuracy: 0.9331 - val_loss: 0.3351 - val_accuracy: 0.8964
Epoch 117/200
28/28 [==============================] - 15s 542ms/step - loss: 0.1864 - accuracy: 0.9353 - val_loss: 0.3185 - val_accuracy: 0.9021
Epoch 118/200
28/28 [==============================] - 15s 546ms/step - loss: 0.2026 - accuracy: 0.9331 - val_loss: 0.3181 - val_accuracy: 0.8998
Epoch 119/200
28/28 [==============================] - 15s 547ms/step - loss: 0.2089 - accuracy: 0.9287 - val_loss: 0.3094 - val_accuracy: 0.9061
Epoch 120/200
28/28 [==============================] - 15s 553ms/step - loss: 0.1820 - accuracy: 0.9407 - val_loss: 0.3048 - val_accuracy: 0.9078
Epoch 121/200
28/28 [==============================] - 16s 556ms/step - loss: 0.1872 - accuracy: 0.9400 - val_loss: 0.3170 - val_accuracy: 0.9021
Epoch 122/200
28/28 [==============================] - 15s 554ms/step - loss: 0.1842 - accuracy: 0.9377 - val_loss: 0.3445 - val_accuracy: 0.8884
Epoch 123/200
28/28 [==============================] - 16s 563ms/step - loss: 0.1849 - accuracy: 0.9387 - val_loss: 0.3267 - val_accuracy: 0.8970
Epoch 124/200
28/28 [==============================] - 16s 554ms/step - loss: 0.1970 - accuracy: 0.9320 - val_loss: 0.3592 - val_accuracy: 0.8918
Epoch 125/200
28/28 [==============================] - 16s 562ms/step - loss: 0.2056 - accuracy: 0.9327 - val_loss: 0.3170 - val_accuracy: 0.9038
Epoch 126/200
28/28 [==============================] - 16s 555ms/step - loss: 0.1926 - accuracy: 0.9311 - val_loss: 0.3138 - val_accuracy: 0.9021
Epoch 127/200
28/28 [==============================] - 470s 17s/step - loss: 0.1908 - accuracy: 0.9393 - val_loss: 0.3150 - val_accuracy: 0.8998
Epoch 128/200
28/28 [==============================] - 22s 774ms/step - loss: 0.1834 - accuracy: 0.9416 - val_loss: 0.2986 - val_accuracy: 0.9027
Epoch 129/200
28/28 [==============================] - 21s 772ms/step - loss: 0.1669 - accuracy: 0.9426 - val_loss: 0.3071 - val_accuracy: 0.9124
Epoch 130/200
28/28 [==============================] - 18s 657ms/step - loss: 0.1743 - accuracy: 0.9416 - val_loss: 0.2900 - val_accuracy: 0.9141
Epoch 131/200
28/28 [==============================] - 17s 616ms/step - loss: 0.1735 - accuracy: 0.9440 - val_loss: 0.2973 - val_accuracy: 0.9067
Epoch 132/200
28/28 [==============================] - 16s 573ms/step - loss: 0.1717 - accuracy: 0.9435 - val_loss: 0.2948 - val_accuracy: 0.9096
Epoch 133/200
28/28 [==============================] - 16s 567ms/step - loss: 0.1682 - accuracy: 0.9442 - val_loss: 0.3092 - val_accuracy: 0.9056
Epoch 134/200
28/28 [==============================] - 16s 572ms/step - loss: 0.1687 - accuracy: 0.9443 - val_loss: 0.2727 - val_accuracy: 0.9141
Epoch 135/200
28/28 [==============================] - 16s 581ms/step - loss: 0.1711 - accuracy: 0.9403 - val_loss: 0.3028 - val_accuracy: 0.9090
Epoch 136/200
28/28 [==============================] - 16s 576ms/step - loss: 0.1670 - accuracy: 0.9416 - val_loss: 0.3131 - val_accuracy: 0.9038
Epoch 137/200
28/28 [==============================] - 16s 572ms/step - loss: 0.1629 - accuracy: 0.9443 - val_loss: 0.3189 - val_accuracy: 0.9124
Epoch 138/200
28/28 [==============================] - 16s 589ms/step - loss: 0.1581 - accuracy: 0.9475 - val_loss: 0.2830 - val_accuracy: 0.9073
Epoch 139/200
28/28 [==============================] - 17s 595ms/step - loss: 0.1617 - accuracy: 0.9476 - val_loss: 0.3095 - val_accuracy: 0.9090
Epoch 140/200
28/28 [==============================] - 17s 610ms/step - loss: 0.1561 - accuracy: 0.9443 - val_loss: 0.3491 - val_accuracy: 0.8964
Epoch 141/200
28/28 [==============================] - 16s 588ms/step - loss: 0.1580 - accuracy: 0.9473 - val_loss: 0.3564 - val_accuracy: 0.8901
Epoch 142/200
28/28 [==============================] - 16s 567ms/step - loss: 0.1664 - accuracy: 0.9446 - val_loss: 0.3271 - val_accuracy: 0.8912
Epoch 143/200
28/28 [==============================] - 15s 542ms/step - loss: 0.1735 - accuracy: 0.9433 - val_loss: 0.2726 - val_accuracy: 0.9107
Epoch 144/200
28/28 [==============================] - 16s 561ms/step - loss: 0.1559 - accuracy: 0.9485 - val_loss: 0.3020 - val_accuracy: 0.9050
Epoch 145/200
28/28 [==============================] - 15s 548ms/step - loss: 0.1439 - accuracy: 0.9508 - val_loss: 0.3088 - val_accuracy: 0.9073
Epoch 146/200
28/28 [==============================] - 15s 555ms/step - loss: 0.1592 - accuracy: 0.9459 - val_loss: 0.3260 - val_accuracy: 0.8952
Epoch 147/200
28/28 [==============================] - 15s 540ms/step - loss: 0.1441 - accuracy: 0.9485 - val_loss: 0.2810 - val_accuracy: 0.9130
Epoch 148/200
28/28 [==============================] - 15s 541ms/step - loss: 0.1518 - accuracy: 0.9515 - val_loss: 0.2821 - val_accuracy: 0.9096
Epoch 149/200
28/28 [==============================] - 15s 549ms/step - loss: 0.1384 - accuracy: 0.9550 - val_loss: 0.3193 - val_accuracy: 0.9038
Epoch 150/200
28/28 [==============================] - 15s 547ms/step - loss: 0.1561 - accuracy: 0.9477 - val_loss: 0.3277 - val_accuracy: 0.8970
Epoch 151/200
28/28 [==============================] - 16s 572ms/step - loss: 0.1538 - accuracy: 0.9495 - val_loss: 0.3405 - val_accuracy: 0.9004
Epoch 152/200
28/28 [==============================] - 16s 579ms/step - loss: 0.1535 - accuracy: 0.9473 - val_loss: 0.2993 - val_accuracy: 0.9073
Epoch 153/200
28/28 [==============================] - 15s 550ms/step - loss: 0.1382 - accuracy: 0.9543 - val_loss: 0.2936 - val_accuracy: 0.9124
Epoch 154/200
28/28 [==============================] - 15s 546ms/step - loss: 0.1380 - accuracy: 0.9509 - val_loss: 0.2868 - val_accuracy: 0.9181
Epoch 155/200
28/28 [==============================] - 15s 544ms/step - loss: 0.1365 - accuracy: 0.9528 - val_loss: 0.3167 - val_accuracy: 0.9130
Epoch 156/200
28/28 [==============================] - 16s 555ms/step - loss: 0.1327 - accuracy: 0.9560 - val_loss: 0.2941 - val_accuracy: 0.9193
Epoch 157/200
28/28 [==============================] - 15s 554ms/step - loss: 0.1494 - accuracy: 0.9523 - val_loss: 0.3071 - val_accuracy: 0.9090
Epoch 158/200
28/28 [==============================] - 16s 579ms/step - loss: 0.1617 - accuracy: 0.9502 - val_loss: 0.3005 - val_accuracy: 0.9141
Epoch 159/200
28/28 [==============================] - 15s 541ms/step - loss: 0.1334 - accuracy: 0.9546 - val_loss: 0.3275 - val_accuracy: 0.9096
Epoch 160/200
28/28 [==============================] - 15s 544ms/step - loss: 0.1502 - accuracy: 0.9515 - val_loss: 0.2923 - val_accuracy: 0.9124
Epoch 161/200
28/28 [==============================] - 15s 543ms/step - loss: 0.1428 - accuracy: 0.9546 - val_loss: 0.3140 - val_accuracy: 0.9056
Epoch 162/200
28/28 [==============================] - 15s 544ms/step - loss: 0.1250 - accuracy: 0.9602 - val_loss: 0.2890 - val_accuracy: 0.9187
Epoch 163/200
28/28 [==============================] - 15s 541ms/step - loss: 0.1278 - accuracy: 0.9545 - val_loss: 0.2779 - val_accuracy: 0.9130
Epoch 164/200
28/28 [==============================] - 15s 542ms/step - loss: 0.1327 - accuracy: 0.9549 - val_loss: 0.3050 - val_accuracy: 0.9113
Epoch 165/200
28/28 [==============================] - 16s 559ms/step - loss: 0.1243 - accuracy: 0.9586 - val_loss: 0.2837 - val_accuracy: 0.9159
Epoch 166/200
28/28 [==============================] - 15s 546ms/step - loss: 0.1346 - accuracy: 0.9552 - val_loss: 0.2598 - val_accuracy: 0.9244
Epoch 167/200
28/28 [==============================] - 15s 545ms/step - loss: 0.1333 - accuracy: 0.9578 - val_loss: 0.3137 - val_accuracy: 0.9073
Epoch 168/200
28/28 [==============================] - 15s 544ms/step - loss: 0.1164 - accuracy: 0.9612 - val_loss: 0.2762 - val_accuracy: 0.9153
Epoch 169/200
28/28 [==============================] - 15s 546ms/step - loss: 0.1297 - accuracy: 0.9575 - val_loss: 0.2606 - val_accuracy: 0.9222
Epoch 170/200
28/28 [==============================] - 667s 25s/step - loss: 0.1293 - accuracy: 0.9540 - val_loss: 0.2969 - val_accuracy: 0.9101
Epoch 171/200
28/28 [==============================] - 22s 797ms/step - loss: 0.1324 - accuracy: 0.9553 - val_loss: 0.2921 - val_accuracy: 0.9187
Epoch 172/200
28/28 [==============================] - 16s 563ms/step - loss: 0.1273 - accuracy: 0.9565 - val_loss: 0.2622 - val_accuracy: 0.9210
Epoch 173/200
28/28 [==============================] - 16s 556ms/step - loss: 0.1237 - accuracy: 0.9606 - val_loss: 0.2671 - val_accuracy: 0.9262
Epoch 174/200
28/28 [==============================] - 16s 561ms/step - loss: 0.1173 - accuracy: 0.9603 - val_loss: 0.2959 - val_accuracy: 0.9136
Epoch 175/200
28/28 [==============================] - 16s 554ms/step - loss: 0.1170 - accuracy: 0.9593 - val_loss: 0.2643 - val_accuracy: 0.9216
Epoch 176/200
28/28 [==============================] - 15s 546ms/step - loss: 0.1244 - accuracy: 0.9601 - val_loss: 0.3103 - val_accuracy: 0.9130
Epoch 177/200
28/28 [==============================] - 15s 551ms/step - loss: 0.1288 - accuracy: 0.9593 - val_loss: 0.3006 - val_accuracy: 0.9164
Epoch 178/200
28/28 [==============================] - 15s 552ms/step - loss: 0.1235 - accuracy: 0.9573 - val_loss: 0.2679 - val_accuracy: 0.9244
Epoch 179/200
28/28 [==============================] - 15s 546ms/step - loss: 0.1153 - accuracy: 0.9601 - val_loss: 0.2866 - val_accuracy: 0.9216
Epoch 180/200
28/28 [==============================] - 15s 545ms/step - loss: 0.1101 - accuracy: 0.9618 - val_loss: 0.2765 - val_accuracy: 0.9256
Epoch 181/200
28/28 [==============================] - 15s 547ms/step - loss: 0.1261 - accuracy: 0.9593 - val_loss: 0.2697 - val_accuracy: 0.9244
Epoch 182/200
28/28 [==============================] - 15s 545ms/step - loss: 0.1193 - accuracy: 0.9609 - val_loss: 0.3223 - val_accuracy: 0.9113
Epoch 183/200
28/28 [==============================] - 16s 557ms/step - loss: 0.1112 - accuracy: 0.9644 - val_loss: 0.3197 - val_accuracy: 0.9090
Epoch 184/200
28/28 [==============================] - 15s 545ms/step - loss: 0.1120 - accuracy: 0.9652 - val_loss: 0.3549 - val_accuracy: 0.9078
Epoch 185/200
28/28 [==============================] - 15s 543ms/step - loss: 0.1142 - accuracy: 0.9644 - val_loss: 0.2738 - val_accuracy: 0.9227
Epoch 186/200
28/28 [==============================] - 15s 548ms/step - loss: 0.1137 - accuracy: 0.9602 - val_loss: 0.2755 - val_accuracy: 0.9279
Epoch 187/200
28/28 [==============================] - 15s 541ms/step - loss: 0.1406 - accuracy: 0.9569 - val_loss: 0.2814 - val_accuracy: 0.9222
Epoch 188/200
28/28 [==============================] - 15s 542ms/step - loss: 0.1150 - accuracy: 0.9613 - val_loss: 0.2985 - val_accuracy: 0.9176
Epoch 189/200
28/28 [==============================] - 15s 546ms/step - loss: 0.1096 - accuracy: 0.9648 - val_loss: 0.2774 - val_accuracy: 0.9153
Epoch 190/200
28/28 [==============================] - 15s 540ms/step - loss: 0.1092 - accuracy: 0.9656 - val_loss: 0.2635 - val_accuracy: 0.9267
Epoch 191/200
28/28 [==============================] - 15s 542ms/step - loss: 0.1057 - accuracy: 0.9646 - val_loss: 0.3240 - val_accuracy: 0.9096
Epoch 192/200
28/28 [==============================] - 15s 548ms/step - loss: 0.1093 - accuracy: 0.9634 - val_loss: 0.3133 - val_accuracy: 0.9124
Epoch 193/200
28/28 [==============================] - 15s 552ms/step - loss: 0.1140 - accuracy: 0.9639 - val_loss: 0.2903 - val_accuracy: 0.9193
Epoch 194/200
28/28 [==============================] - 15s 553ms/step - loss: 0.1096 - accuracy: 0.9646 - val_loss: 0.2983 - val_accuracy: 0.9124
Epoch 195/200
28/28 [==============================] - 15s 548ms/step - loss: 0.1011 - accuracy: 0.9674 - val_loss: 0.2762 - val_accuracy: 0.9239
Epoch 196/200
28/28 [==============================] - 16s 576ms/step - loss: 0.1009 - accuracy: 0.9674 - val_loss: 0.2894 - val_accuracy: 0.9244
Epoch 197/200
28/28 [==============================] - 16s 563ms/step - loss: 0.1073 - accuracy: 0.9642 - val_loss: 0.2834 - val_accuracy: 0.9170
Epoch 198/200
28/28 [==============================] - 15s 547ms/step - loss: 0.1050 - accuracy: 0.9656 - val_loss: 0.3541 - val_accuracy: 0.9021
Epoch 199/200
28/28 [==============================] - 15s 548ms/step - loss: 0.1066 - accuracy: 0.9652 - val_loss: 0.2713 - val_accuracy: 0.9239
Epoch 200/200
28/28 [==============================] - 15s 544ms/step - loss: 0.0989 - accuracy: 0.9684 - val_loss: 0.2938 - val_accuracy: 0.9170
Training completed in time:  1:19:05.880745
In [116]:
# Evaluating the model on the training and testing set
cnn_acc_train = model.evaluate(x_train_2D, y_train_2D, verbose=0)
print("Training Accuracy: ", cnn_acc_train[1])

cnn_acc_test = model.evaluate(x_test_2D, y_test_2D, verbose=0)
print("Testing Accuracy: ", cnn_acc_test[1])
Training Accuracy:  0.9958482384681702
Testing Accuracy:  0.9170005917549133
In [121]:
plot_accuracy(history,num_epochs,num_batch_size)
In [122]:
plot_losses(history,num_epochs)
In [123]:
model_json = model.to_json()
with open("model.json", "w") as json_file:
    json_file.write(model_json)
# serialize weights to HDF5
model.save_weights("model.h5")
In [128]:
# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("model.h5")
In [125]:
loaded_model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
score = loaded_model.evaluate(x_test_2D, y_test_2D, verbose=0)
print("%s: %.2f%%" % (loaded_model.metrics_names[1], score[1]*100))
accuracy: 91.70%
In [166]:
 
In [157]:
y_pred_2D = model.predict(x_test_2D)
55/55 [==============================] - 3s 24ms/step
In [204]:
def extract_features(file_name, max_pad=174, n=40):
    try:
        audio, sample_rate = librosa.load(file_name, res_type='kaiser_fast') 
        mfccs = librosa.feature.mfcc(y=audio, sr=sample_rate, n_mfcc=n)
        
        # padding
        pad_width = max_pad - mfccs.shape[1]
        mfccs_pad = np.pad(mfccs, pad_width=((0, 0), (0, pad_width)), mode='constant')
        mfccsscaled = np.mean(mfccs.T, axis=0)
        
    except Exception as e:
        print("Error encountered while parsing file:", file_name)
        return None 
     
    return mfccs, mfccs_pad, mfccsscaled

class_names= ['air_conditioner', 'car_horn','car_horn','children_playing','dog_bark','drilling','engine_idling','gun_shot','jackhammer','siren','street_music', ]
label_encoder_2D = LabelEncoder()
label_encoder_2D.fit(class_names)



def predict(file_name, max_pad=174, n=40):
    pred_features, pred_features_pad, pred_features_scaled = extract_features(file_name, max_pad, n)
    pred_feature = pred_features_pad.reshape(1, num_rows, num_columns, num_channels)

    pred_v = model.predict(pred_feature)
    preditcted_classes=np.argmax(pred_v, axis=1)
    pred_classes = label_encoder_2D.inverse_transform(preditcted_classes)
    print("The predicted class is:", pred_classes[0], '\n') 

#     predicted_proba_vector = model.predict_proba(pred_feature)
#     predicted_proba = predicted_proba_vector[0]
#     for i in range(len(predicted_proba)):
#         category = le.inverse_transform(np.array([i]))
#         print(category[0], "\t\t : ", format(predicted_proba[i], '.32f'))
    predicted_proba = pred_v[0]  # Get predicted probabilities directly from the predict method
    for i, prob in enumerate(predicted_proba):
        category = label_encoder_2D.inverse_transform(np.array([i]))
        print(category[0], "\t\t : ", format(prob, '.32f'))
        
        
predict('E:/Projects/Audio Classification/Audio_Classification_4_using_models_(Urbansound8 dataset)/Sample_sound/jackhammer.wav', max_pad=174, n=40)
ipd.Audio('E:/Projects/Audio Classification/Audio_Classification_4_using_models_(Urbansound8 dataset)/Sample_sound/jackhammer.wav')
1/1 [==============================] - 0s 37ms/step
The predicted class is: jackhammer 

air_conditioner 		 :  0.00000085888774492559605278074741
car_horn 		 :  0.00007206947339000180363655090332
children_playing 		 :  0.00000000190029836311111921531847
dog_bark 		 :  0.00000049399449153497698716819286
drilling 		 :  0.00069468154106289148330688476562
engine_idling 		 :  0.00000084055091065238229930400848
gun_shot 		 :  0.00000000169058578247671675853780
jackhammer 		 :  0.99922776222229003906250000000000
siren 		 :  0.00000259413604908331762999296188
street_music 		 :  0.00000073191415594919817522168159
Out[204]:
Your browser does not support the audio element.
In [205]:
predict('E:/Projects/Audio Classification/Audio_Classification_4_using_models_(Urbansound8 dataset)/Sample_sound/drilling.wav', max_pad=174, n=40)
ipd.Audio('E:/Projects/Audio Classification/Audio_Classification_4_using_models_(Urbansound8 dataset)/Sample_sound/drilling.wav')
1/1 [==============================] - 0s 39ms/step
The predicted class is: drilling 

air_conditioner 		 :  0.00000000000000335794904049121525
car_horn 		 :  0.00000000000017995401989292364453
children_playing 		 :  0.00000000000000001570934270065795
dog_bark 		 :  0.00000000000011252517608017001405
drilling 		 :  1.00000000000000000000000000000000
engine_idling 		 :  0.00000000000000000362152121243934
gun_shot 		 :  0.00000000000000195978796511703680
jackhammer 		 :  0.00000000005411809542166245989847
siren 		 :  0.00000000000005004095485964914214
street_music 		 :  0.00000000000000000051503403054000
Out[205]:
Your browser does not support the audio element.

Predicting the class of other audios¶

In [195]:
predict('E:/Projects/Audio Classification/Audio_Classification_4_using_models_(Urbansound8 dataset)/Audio_dataset/fold4/22883-7-71-0.wav', max_pad=174, n=40)
1/1 [==============================] - 0s 33ms/step
The predicted class is: jackhammer 

air_conditioner 		 :  0.00000048977466349242604337632656
car_horn 		 :  0.00000689605349180055782198905945
children_playing 		 :  0.00000000001413362309132804028877
dog_bark 		 :  0.00000000074528205828983118408360
drilling 		 :  0.00000039481821545450657140463591
engine_idling 		 :  0.00000064945828626150614582002163
gun_shot 		 :  0.00000000011368068292272326402781
jackhammer 		 :  0.99999165534973144531250000000000
siren 		 :  0.00000001686096595676644938066602
street_music 		 :  0.00000000114699827413744515069993
In [187]:
## checking whether the path exists or not
os.path.exists('E:/Projects/Audio Classification/Audio_Classification_4_using_models_(Urbansound8 dataset)/electric-drill-01.wav')
Out[187]:
True

Confusion Matrix for CNN Model¶

In [203]:
# Convert one-hot encoded predictions back to class indices
class_names = meta_data.groupby(['class', 'classID'], as_index = False).sum()['class']
y_pred_indices = np.argmax(y_pred_2D, axis=1)

# Convert one-hot encoded true labels back to class indices
y_true_indices = np.argmax(y_test_2D, axis=1)

# Call the plot_confusion_matrix function with class indices
plot_confusion_matrix(y_true_indices, y_pred_indices, classes=class_names, cmap=plt.cm.Blues)
Out[203]:
<Axes: title={'center': 'Confusion matrix, without normalization'}, xlabel='Predicted label', ylabel='True label'>
In [201]:
num_classes = len(np.unique(y_pred_indices))
print(num_classes)
10

Results¶

Model Name Training Accuracy(%) Testing Accuracy
Random Forest Classification 57.12 53.34
Support Vector Machine(SVM) 99.87 89.81
Multi-Layer Perceptron 82.10 76.13
Convolutional Neural Network 99.58 91.70

CNN Model wins the battle¶

In [ ]: